-rw-r--r--  gcc/ChangeLog | 54
-rw-r--r--  gcc/DATESTAMP | 2
-rw-r--r--  gcc/c-family/ChangeLog | 5
-rw-r--r--  gcc/c/ChangeLog | 16
-rw-r--r--  gcc/cobol/ChangeLog | 55
-rw-r--r--  gcc/cobol/gcobolspec.cc | 38
-rw-r--r--  gcc/cobol/util.cc | 19
-rw-r--r--  gcc/config.in | 6
-rw-r--r--  gcc/config/avr/avr-passes.cc | 54
-rwxr-xr-x  gcc/configure | 2
-rw-r--r--  gcc/configure.ac | 2
-rw-r--r--  gcc/cp/ChangeLog | 29
-rw-r--r--  gcc/cp/decl2.cc | 22
-rw-r--r--  gcc/cp/pt.cc | 11
-rw-r--r--  gcc/cp/typeck.cc | 20
-rw-r--r--  gcc/d/ChangeLog | 8
-rw-r--r--  gcc/fortran/ChangeLog | 7
-rw-r--r--  gcc/fortran/resolve.cc | 16
-rw-r--r--  gcc/rust/ChangeLog | 62
-rw-r--r--  gcc/rust/Make-lang.in | 2
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/.cargo/config.toml | 5
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/.cargo-checksum.json | 1
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/CODE_OF_CONDUCT.md | 40
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/Cargo.toml | 29
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-APACHE | 201
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-MIT | 23
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/README.md | 44
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/RELEASES.md | 26
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/borrow_check.rs | 115
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/graspan1.rs | 62
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/join.rs | 180
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/lib.rs | 567
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/map.rs | 13
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/test.rs | 195
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/treefrog.rs | 661
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/.cargo-checksum.json | 1
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/CHANGELOG.md | 324
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/Cargo.toml | 139
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-APACHE | 201
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-MIT | 25
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/README.md | 130
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/benches/value.rs | 27
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/__private_api.rs | 123
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/error.rs | 94
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/key.rs | 143
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/mod.rs | 265
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/source.rs | 514
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/value.rs | 1394
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/lib.rs | 1878
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/macros.rs | 367
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/serde.rs | 397
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/triagebot.toml | 1
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/.cargo-checksum.json | 1
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/Cargo.toml | 29
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/README.md | 6
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/facts.rs | 129
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/lib.rs | 16
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/datafrog_opt.rs | 495
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/initialization.rs | 284
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/liveness.rs | 170
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/location_insensitive.rs | 156
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/mod.rs | 614
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/naive.rs | 299
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/.cargo-checksum.json | 1
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/CODE_OF_CONDUCT.md | 40
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/Cargo.toml | 25
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-APACHE | 201
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-MIT | 23
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/README.md | 38
-rw-r--r--  gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/src/lib.rs | 148
-rw-r--r--  gcc/testsuite/ChangeLog | 105
-rw-r--r--  gcc/testsuite/g++.dg/conversion/ptrmem10.C | 14
-rw-r--r--  gcc/testsuite/g++.dg/cpp0x/alias-decl-variadic3.C | 22
-rw-r--r--  gcc/testsuite/g++.dg/template/linkage7.C | 17
-rw-r--r--  gcc/testsuite/gcc.target/i386/apx-ndd-tls-1b.c | 7
-rw-r--r--  gcc/testsuite/gfortran.dg/allocate_assumed_charlen_5.f90 | 17
-rw-r--r--  gcc/testsuite/gfortran.dg/deferred_character_18.f90 | 3
-rw-r--r--  include/ChangeLog | 6
-rw-r--r--  libcpp/po/ChangeLog | 4
-rw-r--r--  libiberty/ChangeLog | 9
-rw-r--r--  libphobos/ChangeLog | 32
-rw-r--r--  libphobos/src/MERGE | 2
-rw-r--r--  libphobos/src/std/getopt.d | 73
-rw-r--r--  libphobos/src/std/json.d | 44
-rw-r--r--  libstdc++-v3/ChangeLog | 65
85 files changed, 11566 insertions, 144 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 736c5f8..0f50aff 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,57 @@
+2025-03-18 Georg-Johann Lay <avr@gjlay.de>
+
+ PR target/119355
+ * config/avr/avr-passes.cc (memento_t::apply): Only
+ read values[p.arg] when it is actually used.
+
+2025-03-18 Iain Sandoe <iain@sandoe.co.uk>
+
+ PR cobol/119301
+ * config.in: Regenerate.
+ * configure: Regenerate.
+ * configure.ac: Add check for get_current_dir_name.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/116545
+ * doc/extend.texi (musttail statement attribute): Document
+ that musttail GNU attribute can be used as well.
+
+2025-03-18 Michael Matz <matz@suse.de>
+
+ * config/rs6000/rs6000.opt.urls: Regenerate.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ * doc/sourcebuild.texi (dg-output-file): Document.
+
+2025-03-18 Andrew Pinski <quic_apinski@quicinc.com>
+
+ * gimple-ssa-sccopy.cc (scc_copy_prop::replace_scc_by_value): Dump
+ what is being replaced with what.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR rtl-optimization/119307
+ * lra.cc (lra_rtx_hash): Handle SUBREG.
+
+2025-03-18 Richard Biener <rguenther@suse.de>
+
+ PR debug/101533
+ * dwarf2out.cc (gen_type_die_with_usage): When we have
+	output the typedef already, do nothing for a typedef variant.
+ Do not set TREE_ASM_WRITTEN on the type.
+
+2025-03-18 Jeff Law <jlaw@ventanamicro.com>
+
+ * config/riscv/riscv.md (equality shifted-arith splitter): Do not
+ create op AND -1 as it won't be cleaned up post-reload.
+
+2025-03-18 Andrew Pinski <quic_apinski@quicinc.com>
+
+ * configure: Regenerate.
+ * configure.ac: s/gcc_cv_ld64_macosx_version_min/gcc_cv_ld64_macos_version_min/.
+
2025-03-17 Jeff Law <jlaw@ventanamicro.com>
* config/riscv/bitmanip.md (*<or_optab>i<mode>_extrabit): Reject cases
diff --git a/gcc/DATESTAMP b/gcc/DATESTAMP
index c0dd068..22523f5 100644
--- a/gcc/DATESTAMP
+++ b/gcc/DATESTAMP
@@ -1 +1 @@
-20250318
+20250319
diff --git a/gcc/c-family/ChangeLog b/gcc/c-family/ChangeLog
index 3db9088..01ce574 100644
--- a/gcc/c-family/ChangeLog
+++ b/gcc/c-family/ChangeLog
@@ -1,3 +1,8 @@
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/116545
+ * c-attribs.cc (c_common_clang_attributes): Add musttail.
+
2025-03-14 Jakub Jelinek <jakub@redhat.com>
PR target/119120
diff --git a/gcc/c/ChangeLog b/gcc/c/ChangeLog
index 54775d4..c2528f6 100644
--- a/gcc/c/ChangeLog
+++ b/gcc/c/ChangeLog
@@ -1,3 +1,19 @@
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/119311
+ * c-parser.cc (c_parser_if_body): Pass result of c_parser_all_labels
+ as last argument to c_parser_statement_after_labels.
+ (c_parser_else_body): Likewise.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/116545
+ * c-parser.cc (c_parser_declaration_or_fndef): Parse
+ __attribute__((musttail)) return.
+ (c_parser_handle_musttail): Diagnose attribute arguments.
+ (c_parser_statement_after_labels): Parse
+ __attribute__((musttail)) return.
+
2025-03-11 Jakub Jelinek <jakub@redhat.com>
PR c/117178
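
The PR c/116545 entries above add parsing of the GNU __attribute__ spelling of
musttail on return statements. A minimal illustrative sketch of what that
spelling looks like in user code (the function names here are hypothetical and
not taken from this commit; the [[gnu::musttail]] spelling was already
accepted):

    int callee (int x);

    int
    caller (int x)
    {
      /* Request a guaranteed tail call using the GNU attribute spelling.  */
      __attribute__((musttail)) return callee (x + 1);
    }
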
diff --git a/gcc/cobol/ChangeLog b/gcc/cobol/ChangeLog
index 9dfda4c..be421d2 100644
--- a/gcc/cobol/ChangeLog
+++ b/gcc/cobol/ChangeLog
@@ -1,3 +1,58 @@
+2025-03-18 Iain Sandoe <iain@sandoe.co.uk>
+
+ * gcobolspec.cc (append_rpath): Remove.
+ (lang_specific_driver): Remove hard-wired rpath and library
+ names.
+
+2025-03-18 Iain Sandoe <iain@sandoe.co.uk>
+
+ PR cobol/119301
+ * util.cc: Check for the availability of get_current_dir_name
+	and fall back to getcwd() if it is not present on the host.
+
+2025-03-18 Richard Biener <rguenther@suse.de>
+
+ * gengen.cc (gg_finalize_function): Dump to TDI_original.
+
+2025-03-18 Bob Dubner <rdubner@symas.com>
+
+ * cdf.y: Make compatible with C++14.
+ * copybook.h: Likewise.
+ * dts.h: Likewise.
+ * except.cc: Likewise.
+ * genapi.cc: Likewise.
+ * genutil.cc: Likewise.
+ * genutil.h: Likewise.
+ * lexio.cc: Likewise.
+ * parse.y: Likewise.
+ * parse_ante.h: Likewise.
+ * show_parse.h: Likewise.
+ * symbols.cc: Likewise.
+ * symbols.h: Likewise.
+ * util.cc: Likewise.
+
+2025-03-18 Matthias Klose <doko@ubuntu.com>
+
+ * Make-lang.in (GCOBC_TARGET_INSTALL_NAME, gcobol-cross): New.
+ (cobol.all.cross): Depend on gcobol-cross.
+ (cobol.install-common): Adjust install for the cross build.
+ (cobol.uninstall): Use *_INSTALL_NAME for uninstall.
+
+2025-03-18 Iain Sandoe <iain@sandoe.co.uk>
+
+ * util.cc (cbl_field_t::report_invalid_initial_value): Avoid
+ auto here and specify const char *.
+
+2025-03-18 Jose E. Marchesi <jose.marchesi@oracle.com>
+
+ * cdf-copy.cc (copybook_elem_t::open_file): Use ldirname rather
+ than dirname.
+
+2025-03-18 Iain Sandoe <iain@sandoe.co.uk>
+
+ * parse.y: Remove c++ header includes appearing after
+ system.h.
+
2025-03-17 Bob Dubner <rdubner@symas.com>
PR cobol/119213
diff --git a/gcc/cobol/gcobolspec.cc b/gcc/cobol/gcobolspec.cc
index 5bd6853..4ae8e2c 100644
--- a/gcc/cobol/gcobolspec.cc
+++ b/gcc/cobol/gcobolspec.cc
@@ -141,21 +141,6 @@ append_rdynamic()
}
static void
-append_rpath()
- {
-#ifdef EXEC_LIB
- // Handing append_option() something on the stack Just Doesn't Work
- if( strlen(EXEC_LIB) )
- {
- static char ach[256];
- snprintf(ach, sizeof(ach), "-rpath=%s", EXEC_LIB);
- append_option (OPT_Wl_, ach, 1);
- }
-#endif
- return;
- }
-
-static void
append_allow_multiple_definition()
{
append_option (OPT_Wl_, "--allow-multiple-definition", 1);
@@ -250,9 +235,6 @@ lang_specific_driver (struct cl_decoded_option **in_decoded_options,
int index_libgcobol_a = 0;
- // This is for the -Wl,-rpath=<EXEC_LIB>
- bool need_rpath = true;
-
bool no_files_error = true;
#ifdef NOISY
@@ -339,16 +321,6 @@ lang_specific_driver (struct cl_decoded_option **in_decoded_options,
{
need_allow_multiple_definition = false;
}
- if( strstr(decoded_options[i].orig_option_with_args_text, "-rpath") )
- {
- // The caller is doing something with -rpath. Assume they know what
- // they are doing
-
- // On second thought, always install our rpath. It goes at the end,
- // so if the user specifies and rpath that they prefer, it'll get
- // taken first.
- need_rpath = true;
- }
break;
case OPT_nostdlib:
@@ -616,12 +588,9 @@ lang_specific_driver (struct cl_decoded_option **in_decoded_options,
if( need_libgcobol )
{
-#ifdef EXEC_LIB
- append_option(OPT_L, EXEC_LIB, 1);
-#endif
add_arg_lib(COBOL_LIBRARY, static_libgcobol);
}
- if( need_libmath )
+ if( need_libmath)
{
add_arg_lib(MATH_LIBRARY, static_in_general);
}
@@ -649,11 +618,6 @@ lang_specific_driver (struct cl_decoded_option **in_decoded_options,
append_allow_multiple_definition();
}
- if( need_rpath && (n_infiles || n_outfiles) )
- {
- append_rpath();
- }
-
if( prior_main )
{
char ach[] = "\"-main\" without a source file";
diff --git a/gcc/cobol/util.cc b/gcc/cobol/util.cc
index 62ecd98..101a0a0 100644
--- a/gcc/cobol/util.cc
+++ b/gcc/cobol/util.cc
@@ -72,6 +72,25 @@ extern int yyparse(void);
extern int demonstration_administrator(int N);
+#if !defined (HAVE_GET_CURRENT_DIR_NAME)
+/* Posix platforms might not have get_current_dir_name but should have
+ getcwd() and PATH_MAX. */
+#if __has_include (<limits.h>)
+# include <limits.h>
+#endif
+/* The Hurd doesn't define PATH_MAX. */
+#if !defined (PATH_MAX) && defined(__GNU__)
+# define PATH_MAX 4096
+#endif
+static inline char *
+get_current_dir_name ()
+{
+ /* Use libiberty's allocator here. */
+ char *buf = (char *) xmalloc (PATH_MAX);
+ return getcwd (buf, PATH_MAX);
+}
+#endif
+
const char *
symbol_type_str( enum symbol_type_t type )
{
diff --git a/gcc/config.in b/gcc/config.in
index 0b46faa..bc60d36 100644
--- a/gcc/config.in
+++ b/gcc/config.in
@@ -1624,6 +1624,12 @@
#endif
+/* Define to 1 if you have the `get_current_dir_name' function. */
+#ifndef USED_FOR_TARGET
+#undef HAVE_GET_CURRENT_DIR_NAME
+#endif
+
+
/* Define to 1 if using GNU as. */
#ifndef USED_FOR_TARGET
#undef HAVE_GNU_AS
diff --git a/gcc/config/avr/avr-passes.cc b/gcc/config/avr/avr-passes.cc
index e32c467..184619a 100644
--- a/gcc/config/avr/avr-passes.cc
+++ b/gcc/config/avr/avr-passes.cc
@@ -2205,9 +2205,6 @@ memento_t::apply (const ply_t &p)
}
else if (p.size == 1)
{
- int x = values[p.regno];
- int y = values[p.arg];
-
switch (p.code)
{
default:
@@ -2234,29 +2231,42 @@ memento_t::apply (const ply_t &p)
gcc_unreachable ();
break;
-#define DO_ARITH(n_args, code, expr) \
+#define DO_ARITH1(code, expr) \
+ case code: \
+ gcc_assert (knows (p.regno)); \
+ { \
+ const int x = values[p.regno]; \
+ set_value (p.regno, expr); \
+ } \
+ break
+
+#define DO_ARITH2(code, expr) \
case code: \
gcc_assert (knows (p.regno)); \
- if (n_args == 2) \
- gcc_assert (knows (p.arg)); \
- set_value (p.regno, expr); \
+ gcc_assert (knows (p.arg)); \
+ { \
+ const int x = values[p.regno]; \
+ const int y = values[p.arg]; \
+ set_value (p.regno, expr); \
+ } \
break
- DO_ARITH (1, NEG, -x);
- DO_ARITH (1, NOT, ~x);
- DO_ARITH (1, PRE_INC, x + 1);
- DO_ARITH (1, PRE_DEC, x - 1);
- DO_ARITH (1, ROTATE, (x << 4) | (x >> 4));
- DO_ARITH (1, ASHIFT, x << 1);
- DO_ARITH (1, LSHIFTRT, x >> 1);
- DO_ARITH (1, ASHIFTRT, (x >> 1) | (x & 0x80));
-
- DO_ARITH (2, AND, x & y);
- DO_ARITH (2, IOR, x | y);
- DO_ARITH (2, XOR, x ^ y);
- DO_ARITH (2, PLUS, x + y);
- DO_ARITH (2, MINUS, x - y);
-#undef DO_ARITH
+ DO_ARITH1 (NEG, -x);
+ DO_ARITH1 (NOT, ~x);
+ DO_ARITH1 (PRE_INC, x + 1);
+ DO_ARITH1 (PRE_DEC, x - 1);
+ DO_ARITH1 (ROTATE, (x << 4) | (x >> 4));
+ DO_ARITH1 (ASHIFT, x << 1);
+ DO_ARITH1 (LSHIFTRT, x >> 1);
+ DO_ARITH1 (ASHIFTRT, (x >> 1) | (x & 0x80));
+
+ DO_ARITH2 (AND, x & y);
+ DO_ARITH2 (IOR, x | y);
+ DO_ARITH2 (XOR, x ^ y);
+ DO_ARITH2 (PLUS, x + y);
+ DO_ARITH2 (MINUS, x - y);
+#undef DO_ARITH1
+#undef DO_ARITH2
}
} // size == 1
else
diff --git a/gcc/configure b/gcc/configure
index 0ef47a9..ae1d349 100755
--- a/gcc/configure
+++ b/gcc/configure
@@ -10640,7 +10640,7 @@ for ac_func in times clock kill getrlimit setrlimit atoq \
popen sysconf strsignal getrusage nl_langinfo \
gettimeofday mbstowcs wcswidth mmap posix_fallocate setlocale \
clearerr_unlocked feof_unlocked ferror_unlocked fflush_unlocked fgetc_unlocked fgets_unlocked fileno_unlocked fprintf_unlocked fputc_unlocked fputs_unlocked fread_unlocked fwrite_unlocked getchar_unlocked getc_unlocked putchar_unlocked putc_unlocked madvise mallinfo mallinfo2 fstatat getauxval \
- clock_gettime munmap msync
+ clock_gettime munmap msync get_current_dir_name
do :
as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
ac_fn_cxx_check_func "$LINENO" "$ac_func" "$as_ac_var"
diff --git a/gcc/configure.ac b/gcc/configure.ac
index 4ac419a..8ef11e3 100644
--- a/gcc/configure.ac
+++ b/gcc/configure.ac
@@ -1574,7 +1574,7 @@ AC_CHECK_FUNCS(times clock kill getrlimit setrlimit atoq \
popen sysconf strsignal getrusage nl_langinfo \
gettimeofday mbstowcs wcswidth mmap posix_fallocate setlocale \
gcc_UNLOCKED_FUNCS madvise mallinfo mallinfo2 fstatat getauxval \
- clock_gettime munmap msync)
+ clock_gettime munmap msync get_current_dir_name)
# At least for glibc, clock_gettime is in librt. But don't pull that
# in if it still doesn't give us the function we want.
diff --git a/gcc/cp/ChangeLog b/gcc/cp/ChangeLog
index 612e57d..d59d242 100644
--- a/gcc/cp/ChangeLog
+++ b/gcc/cp/ChangeLog
@@ -1,3 +1,32 @@
+2025-03-18 Marek Polacek <polacek@redhat.com>
+
+ PR c++/119344
+ * typeck.cc (cp_build_binary_op): Use cp_save_expr instead of save_expr.
+
+2025-03-18 Jason Merrill <jason@redhat.com>
+
+ PR c++/119194
+ * decl2.cc (min_vis_expr_r) [ADDR_EXPR]: New case.
+
+2025-03-18 Marek Polacek <polacek@redhat.com>
+
+ PR c++/118104
+ * pt.cc (use_pack_expansion_extra_args_p): Remove an assert.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/116545
+ * parser.cc (cp_parser_statement): Call cp_parser_attributes_opt
+ rather than cp_parser_std_attribute_spec_seq.
+ (cp_parser_jump_statement): Diagnose gnu::musttail attributes
+ with no arguments.
+
+2025-03-18 Patrick Palka <ppalka@redhat.com>
+
+ PR c++/119233
+ * pt.cc (mark_template_arguments_used): Also handle member
+ function pointers.
+
2025-03-14 Jakub Jelinek <jakub@redhat.com>
PR target/119120
diff --git a/gcc/cp/decl2.cc b/gcc/cp/decl2.cc
index 4a9fb1c..a3149f2 100644
--- a/gcc/cp/decl2.cc
+++ b/gcc/cp/decl2.cc
@@ -2843,16 +2843,28 @@ min_vis_expr_r (tree *tp, int */*walk_subtrees*/, void *data)
tpvis = type_visibility (TREE_TYPE (t));
break;
+ case ADDR_EXPR:
+ t = TREE_OPERAND (t, 0);
+ if (VAR_P (t))
+ /* If a variable has its address taken, the lvalue-rvalue conversion is
+ not applied, so skip that case. */
+ goto addressable;
+ break;
+
case VAR_DECL:
case FUNCTION_DECL:
if (decl_constant_var_p (t))
/* The ODR allows definitions in different TUs to refer to distinct
constant variables with internal or no linkage, so such a reference
- shouldn't affect visibility (PR110323). FIXME but only if the
- lvalue-rvalue conversion is applied. We still want to restrict
- visibility according to the type of the declaration however. */
- tpvis = type_visibility (TREE_TYPE (t));
- else if (! TREE_PUBLIC (t))
+ shouldn't affect visibility if the lvalue-rvalue conversion is
+ applied (PR110323). We still want to restrict visibility according
+ to the type of the declaration however. */
+ {
+ tpvis = type_visibility (TREE_TYPE (t));
+ break;
+ }
+ addressable:
+ if (! TREE_PUBLIC (t))
tpvis = VISIBILITY_ANON;
else
tpvis = DECL_VISIBILITY (t);
diff --git a/gcc/cp/pt.cc b/gcc/cp/pt.cc
index 50eda18..538ff22 100644
--- a/gcc/cp/pt.cc
+++ b/gcc/cp/pt.cc
@@ -13180,7 +13180,16 @@ use_pack_expansion_extra_args_p (tree t,
if (has_expansion_arg && has_non_expansion_arg)
{
- gcc_checking_assert (false);
+ /* We can get here with:
+
+ template <class... Ts> struct X {
+ template <class... Us> using Y = Z<void(Ts, Us)...>;
+ };
+ template <class A, class... P>
+ using foo = X<int, int>::Y<A, P...>;
+
+ where we compare int and A and then the second int and P...,
+ whose expansion-ness doesn't match, but that's OK. */
return true;
}
}
diff --git a/gcc/cp/typeck.cc b/gcc/cp/typeck.cc
index 4b382b9..c8e4441 100644
--- a/gcc/cp/typeck.cc
+++ b/gcc/cp/typeck.cc
@@ -5480,7 +5480,7 @@ cp_build_binary_op (const op_location_t &location,
case stv_firstarg:
{
op0 = convert (TREE_TYPE (type1), op0);
- op0 = save_expr (op0);
+ op0 = cp_save_expr (op0);
op0 = build_vector_from_val (type1, op0);
orig_type0 = type0 = TREE_TYPE (op0);
code0 = TREE_CODE (type0);
@@ -5490,7 +5490,7 @@ cp_build_binary_op (const op_location_t &location,
case stv_secondarg:
{
op1 = convert (TREE_TYPE (type0), op1);
- op1 = save_expr (op1);
+ op1 = cp_save_expr (op1);
op1 = build_vector_from_val (type0, op1);
orig_type1 = type1 = TREE_TYPE (op1);
code1 = TREE_CODE (type1);
@@ -6019,9 +6019,9 @@ cp_build_binary_op (const op_location_t &location,
return error_mark_node;
if (TREE_SIDE_EFFECTS (op0))
- op0 = save_expr (op0);
+ op0 = cp_save_expr (op0);
if (TREE_SIDE_EFFECTS (op1))
- op1 = save_expr (op1);
+ op1 = cp_save_expr (op1);
pfn0 = pfn_from_ptrmemfunc (op0);
pfn0 = cp_fully_fold (pfn0);
@@ -6262,8 +6262,8 @@ cp_build_binary_op (const op_location_t &location,
&& !processing_template_decl
&& sanitize_flags_p (SANITIZE_POINTER_COMPARE))
{
- op0 = save_expr (op0);
- op1 = save_expr (op1);
+ op0 = cp_save_expr (op0);
+ op1 = cp_save_expr (op1);
tree tt = builtin_decl_explicit (BUILT_IN_ASAN_POINTER_COMPARE);
instrument_expr = build_call_expr_loc (location, tt, 2, op0, op1);
@@ -6523,14 +6523,14 @@ cp_build_binary_op (const op_location_t &location,
return error_mark_node;
if (first_complex)
{
- op0 = save_expr (op0);
+ op0 = cp_save_expr (op0);
real = cp_build_unary_op (REALPART_EXPR, op0, true, complain);
imag = cp_build_unary_op (IMAGPART_EXPR, op0, true, complain);
switch (code)
{
case MULT_EXPR:
case TRUNC_DIV_EXPR:
- op1 = save_expr (op1);
+ op1 = cp_save_expr (op1);
imag = build2 (resultcode, real_type, imag, op1);
/* Fall through. */
case PLUS_EXPR:
@@ -6543,13 +6543,13 @@ cp_build_binary_op (const op_location_t &location,
}
else
{
- op1 = save_expr (op1);
+ op1 = cp_save_expr (op1);
real = cp_build_unary_op (REALPART_EXPR, op1, true, complain);
imag = cp_build_unary_op (IMAGPART_EXPR, op1, true, complain);
switch (code)
{
case MULT_EXPR:
- op0 = save_expr (op0);
+ op0 = cp_save_expr (op0);
imag = build2 (resultcode, real_type, op0, imag);
/* Fall through. */
case PLUS_EXPR:
diff --git a/gcc/d/ChangeLog b/gcc/d/ChangeLog
index 0724c0d..cfa41f2 100644
--- a/gcc/d/ChangeLog
+++ b/gcc/d/ChangeLog
@@ -1,3 +1,11 @@
+2025-03-18 Iain Buclaw <ibuclaw@gdcproject.org>
+
+ * dmd/MERGE: Merge upstream dmd fde0f8c40a.
+
+2025-03-18 Iain Buclaw <ibuclaw@gdcproject.org>
+
+ * dmd/MERGE: Merge upstream dmd 51be8bb729.
+
2025-03-16 Iain Buclaw <ibuclaw@gdcproject.org>
* dmd/MERGE: Merge upstream dmd 603225372b.
diff --git a/gcc/fortran/ChangeLog b/gcc/fortran/ChangeLog
index 9e5f939..a61e6f5 100644
--- a/gcc/fortran/ChangeLog
+++ b/gcc/fortran/ChangeLog
@@ -1,3 +1,10 @@
+2025-03-18 Harald Anlauf <anlauf@gmx.de>
+
+ PR fortran/119338
+ * resolve.cc (resolve_allocate_expr): Check F2003:C626: Type-spec
+ in ALLOCATE of an assumed-length character dummy argument shall be
+ an asterisk.
+
2025-03-16 Harald Anlauf <anlauf@gmx.de>
PR fortran/60560
diff --git a/gcc/fortran/resolve.cc b/gcc/fortran/resolve.cc
index d64edff..ddd9827 100644
--- a/gcc/fortran/resolve.cc
+++ b/gcc/fortran/resolve.cc
@@ -8987,6 +8987,22 @@ resolve_allocate_expr (gfc_expr *e, gfc_code *code, bool *array_alloc_wo_spec)
goto failure;
}
+ /* F2003:C626 (R623) A type-param-value in a type-spec shall be an asterisk
+ if and only if each allocate-object is a dummy argument for which the
+ corresponding type parameter is assumed. */
+ if (code->ext.alloc.ts.type == BT_CHARACTER
+ && code->ext.alloc.ts.u.cl->length != NULL
+ && e->ts.type == BT_CHARACTER && !e->ts.deferred
+ && e->ts.u.cl->length == NULL
+ && e->symtree->n.sym->attr.dummy)
+ {
+ gfc_error ("The type parameter in ALLOCATE statement with type-spec "
+ "shall be an asterisk as allocate object %qs at %L is a "
+ "dummy argument with assumed type parameter",
+ sym->name, &e->where);
+ goto failure;
+ }
+
/* Check F08:C632. */
if (code->ext.alloc.ts.type == BT_CHARACTER && !e->ts.deferred
&& !UNLIMITED_POLY (e))
diff --git a/gcc/rust/ChangeLog b/gcc/rust/ChangeLog
index d6f94f5..ba6430a 100644
--- a/gcc/rust/ChangeLog
+++ b/gcc/rust/ChangeLog
@@ -1,3 +1,65 @@
+2025-03-18 Marc Poulhiès <dkm@kataplop.net>
+
+ PR rust/119333
+	* Make-lang.in: Force offline mode for cargo.
+
+2025-03-18 Arthur Cohen <arthur.cohen@embecosm.com>
+
+ * checks/errors/borrowck/ffi-polonius/.cargo/config.toml: New file, force vendored deps.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/.cargo-checksum.json: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/CODE_OF_CONDUCT.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/Cargo.toml: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-APACHE: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-MIT: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/README.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/RELEASES.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/borrow_check.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/graspan1.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/join.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/lib.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/map.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/test.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/treefrog.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/.cargo-checksum.json: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/CHANGELOG.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/Cargo.toml: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-APACHE: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-MIT: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/README.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/benches/value.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/__private_api.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/error.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/key.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/mod.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/source.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/value.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/lib.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/macros.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/src/serde.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/log/triagebot.toml: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/.cargo-checksum.json: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/Cargo.toml: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/README.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/facts.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/lib.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/datafrog_opt.rs:
+ New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/initialization.rs:
+ New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/liveness.rs:
+ New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/location_insensitive.rs:
+ New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/mod.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/naive.rs: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/.cargo-checksum.json: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/CODE_OF_CONDUCT.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/Cargo.toml: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-APACHE: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-MIT: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/README.md: New file.
+ * checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/src/lib.rs: New file.
+
2025-03-17 Muhammad Mahad <mahadtxt@gmail.com>
* typecheck/rust-hir-type-check-type.cc (TypeCheckType::visit):
diff --git a/gcc/rust/Make-lang.in b/gcc/rust/Make-lang.in
index efa6309..c892fa3 100644
--- a/gcc/rust/Make-lang.in
+++ b/gcc/rust/Make-lang.in
@@ -503,5 +503,5 @@ rust/%.o: rust/metadata/%.cc
rust/libffi_polonius.a: \
rust/checks/errors/borrowck/ffi-polonius/Cargo.toml \
$(wildcard $(srcdir)/rust/checks/errors/borrowck/ffi-polonius/src/*)
- cargo build --manifest-path $(srcdir)/rust/checks/errors/borrowck/ffi-polonius/Cargo.toml --release --target-dir rust/ffi-polonius
+ cd $(srcdir)/rust/checks/errors/borrowck/ffi-polonius/ && cargo build --offline --release --target-dir $(objdir)/rust/ffi-polonius
cp rust/ffi-polonius/release/libffi_polonius.a rust/libffi_polonius.a
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/.cargo/config.toml b/gcc/rust/checks/errors/borrowck/ffi-polonius/.cargo/config.toml
new file mode 100644
index 0000000..0236928
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/.cargo/config.toml
@@ -0,0 +1,5 @@
+[source.crates-io]
+replace-with = "vendored-sources"
+
+[source.vendored-sources]
+directory = "vendor"
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/.cargo-checksum.json b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/.cargo-checksum.json
new file mode 100644
index 0000000..80aa32c
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CODE_OF_CONDUCT.md":"edca092fde496419a9f1ba640048aa0270b62dfea576cd3175f0b53e3c230470","Cargo.toml":"c3a8ecf831d7985fafcb8e523fd2d1bf875297e1a11b750a28222793a42e0d4c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"60c181bf865b494df30968378509453719163f57a84f31a244fe69e62c342c5b","RELEASES.md":"a49128d725075bb614da3d53ea2aa2ab080bcb83ce46fc57655f6f6ecc9e2b74","examples/borrow_check.rs":"256857ed6609be8d1f3c8cf041ff8a1c0a884e8540f3156d2f3a2a2a9f73a05d","examples/graspan1.rs":"7d93ba71ff08a3667fea696d0a94e2c91e7514c304f2be8b088465cee17537fe","src/join.rs":"04eb29a02a1fd3ecf27d35a9eaabeec686bbfabdeafe13ad9ac98a622acb0f19","src/lib.rs":"7c95a63c237f48f986abd63ddfa4ed296bb5d280d245295d025fdf2f9744c2f3","src/map.rs":"93f1c7273fb67beb62a4b02201a6502bcaabf1e079aa7201a88d8e0aea6123e9","src/test.rs":"1eee5db2817a781cf8bf16744338b896252e400c150ae23ad87ce8c623acee69","src/treefrog.rs":"fe84a2bd2e36f1a48cb6b7e77a74addf218cfc881e9f6d4e7ceff4d8d97aa380"},"package":"a0afaad2b26fa326569eb264b1363e8ae3357618c43982b3f285f0774ce76b69"} \ No newline at end of file
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/CODE_OF_CONDUCT.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..d70b2b5
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/CODE_OF_CONDUCT.md
@@ -0,0 +1,40 @@
+# The Rust Code of Conduct
+
+A version of this document [can be found online](https://www.rust-lang.org/conduct.html).
+
+## Conduct
+
+**Contact**: [rust-mods@rust-lang.org](mailto:rust-mods@rust-lang.org)
+
+* We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
+* On IRC, please avoid using overtly sexual nicknames or other nicknames that might detract from a friendly, safe and welcoming environment for all.
+* Please be kind and courteous. There's no need to be mean or rude.
+* Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
+* Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
+* We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term "harassment" as including the definition in the <a href="http://citizencodeofconduct.org/">Citizen Code of Conduct</a>; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
+* Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the [Rust moderation team][mod_team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
+* Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
+
+## Moderation
+
+
+These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the [Rust moderation team][mod_team].
+
+1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
+2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
+3. Moderators will first respond to such remarks with a warning.
+4. If the warning is unheeded, the user will be "kicked," i.e., kicked out of the communication channel to cool off.
+5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
+6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
+7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, **in private**. Complaints about bans in-channel are not allowed.
+8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
+
+In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
+
+And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
+
+The enforcement policies listed above apply to all official Rust venues; including official IRC channels (#rust, #rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
+
+*Adapted from the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
+
+[mod_team]: https://www.rust-lang.org/team.html#Moderation-team
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/Cargo.toml b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/Cargo.toml
new file mode 100644
index 0000000..71bccdd
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/Cargo.toml
@@ -0,0 +1,29 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "datafrog"
+version = "2.0.1"
+authors = ["Frank McSherry <fmcsherry@me.com>", "The Rust Project Developers", "Datafrog Developers"]
+description = "Lightweight Datalog engine intended to be embedded in other Rust programs"
+readme = "README.md"
+keywords = ["datalog", "analysis"]
+license = "Apache-2.0/MIT"
+repository = "https://github.com/rust-lang-nursery/datafrog"
+[dev-dependencies.proptest]
+version = "0.8.7"
+[badges.is-it-maintained-issue-resolution]
+repository = "https://github.com/rust-lang-nursery/datafrog"
+
+[badges.is-it-maintained-open-issues]
+repository = "https://github.com/rust-lang-nursery/datafrog"
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-APACHE b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-APACHE
new file mode 100644
index 0000000..16fe87b
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-MIT b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-MIT
new file mode 100644
index 0000000..31aa793
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/README.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/README.md
new file mode 100644
index 0000000..9483584
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/README.md
@@ -0,0 +1,44 @@
+# datafrog
+
+Datafrog is a lightweight Datalog engine intended to be embedded in other Rust programs.
+
+Datafrog has no runtime, and relies on you to build and repeatedly apply the update rules.
+It tries to help you do this correctly. As an example, here is how you might write a reachability
+query using Datafrog (minus the part where we populate the `nodes` and `edges` initial relations).
+
+```rust
+extern crate datafrog;
+use datafrog::Iteration;
+
+fn main() {
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // .. some variables, ..
+ let nodes_var = iteration.variable::<(u32,u32)>("nodes");
+ let edges_var = iteration.variable::<(u32,u32)>("edges");
+
+ // .. load them with some initial values, ..
+ nodes_var.insert(nodes.into());
+ edges_var.insert(edges.into());
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // nodes(a,c) <- nodes(a,b), edges(b,c)
+ nodes_var.from_join(&nodes_var, &edges_var, |_b, &a, &c| (c,a));
+ }
+
+ // extract the final results.
+    let reachable = nodes_var.complete();
+}
+```
+
+If you'd like to read more about how it works, check out [this blog post](https://github.com/frankmcsherry/blog/blob/master/posts/2018-05-19.md).
+
+## Authorship
+
+Datafrog was initially developed by [Frank McSherry][fmc] and was
+later transferred to the rust-lang-nursery organization. Thanks Frank!
+
+[fmc]: https://github.com/frankmcsherry
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/RELEASES.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/RELEASES.md
new file mode 100644
index 0000000..7d666f6
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/RELEASES.md
@@ -0,0 +1,26 @@
+# 2.0.1
+
+- Work around a rustdoc ICE (#24)
+
+# 2.0.0
+
+- Breaking changes:
+ - leapjoin now takes a tuple of leapers, and not a `&mut` slice:
+ - `from_leapjoin(&input, &mut [&mut foo.extend_with(...), ..], ..)` becomes
+ `from_leapjoin(&input, (foo.extend_with(...), ..), ..)`
+ - if there is only one leaper, no tuple is needed
+ - `Relation::from` now requires a vector, not an iterator; use
+ `Relation::from_iter` instead
+- Changed the API to permit using `Relation` and `Variable` more interchangeably,
+ and added a number of operations to construct relations directly, like `Relation::from_join`
+- Extended leapfrog triejoin with new operations (`PrefixFilter` and `ValueFilter`)
+
+# 1.0.0
+
+- Added leapfrog triejoin (#11).
+- Have badges and repo links now!
+- Minor performance improvements (#13).
+
+# 0.1.0
+
+- Initial release.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/borrow_check.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/borrow_check.rs
new file mode 100644
index 0000000..8f2197a
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/borrow_check.rs
@@ -0,0 +1,115 @@
+extern crate datafrog;
+use datafrog::Iteration;
+
+type Region = u32;
+type Borrow = u32;
+type Point = u32;
+
+fn main() {
+ let subset = {
+ // Create a new iteration context, ...
+ let mut iteration1 = Iteration::new();
+
+ // .. some variables, ..
+ let subset = iteration1.variable::<(Region, Region, Point)>("subset");
+
+ // different indices for `subset`.
+ let subset_r1p = iteration1.variable::<((Region, Point), Region)>("subset_r1p");
+ let subset_r2p = iteration1.variable::<((Region, Point), Region)>("subset_r2p");
+ let subset_p = iteration1.variable::<(Point, (Region, Region))>("subset_p");
+
+ // temporaries as we perform a multi-way join.
+ let subset_1 = iteration1.variable::<((Region, Point), Region)>("subset_1");
+ let subset_2 = iteration1.variable::<((Region, Point), Region)>("subset_2");
+
+ let region_live_at = iteration1.variable::<((Region, Point), ())>("region_live_at");
+ let cfg_edge_p = iteration1.variable::<(Point, Point)>("cfg_edge_p");
+
+ // load initial facts.
+ subset.insert(Vec::new().into());
+ region_live_at.insert(Vec::new().into());
+ cfg_edge_p.insert(Vec::new().into());
+
+ // .. and then start iterating rules!
+ while iteration1.changed() {
+ // remap fields to re-index by keys.
+ subset_r1p.from_map(&subset, |&(r1, r2, p)| ((r1, p), r2));
+ subset_r2p.from_map(&subset, |&(r1, r2, p)| ((r2, p), r1));
+ subset_p.from_map(&subset, |&(r1, r2, p)| (p, (r1, r2)));
+
+ // R0: subset(R1, R2, P) :- outlives(R1, R2, P).
+ // Already loaded; outlives is static.
+
+ // R1: subset(R1, R3, P) :-
+ // subset(R1, R2, P),
+ // subset(R2, R3, P).
+ subset.from_join(&subset_r2p, &subset_r1p, |&(_r2, p), &r1, &r3| (r1, r3, p));
+
+ // R2: subset(R1, R2, Q) :-
+ // subset(R1, R2, P),
+ // cfg_edge(P, Q),
+ // region_live_at(R1, Q),
+ // region_live_at(R2, Q).
+
+ subset_1.from_join(&subset_p, &cfg_edge_p, |&_p, &(r1, r2), &q| ((r1, q), r2));
+ subset_2.from_join(&subset_1, &region_live_at, |&(r1, q), &r2, &()| {
+ ((r2, q), r1)
+ });
+ subset.from_join(&subset_2, &region_live_at, |&(r2, q), &r1, &()| (r1, r2, q));
+ }
+
+ subset_r1p.complete()
+ };
+
+ let _requires = {
+ // Create a new iteration context, ...
+ let mut iteration2 = Iteration::new();
+
+ // .. some variables, ..
+ let requires = iteration2.variable::<(Region, Borrow, Point)>("requires");
+ requires.insert(Vec::new().into());
+
+ let requires_rp = iteration2.variable::<((Region, Point), Borrow)>("requires_rp");
+ let requires_bp = iteration2.variable::<((Borrow, Point), Region)>("requires_bp");
+
+ let requires_1 = iteration2.variable::<(Point, (Borrow, Region))>("requires_1");
+ let requires_2 = iteration2.variable::<((Region, Point), Borrow)>("requires_2");
+
+ let subset_r1p = iteration2.variable::<((Region, Point), Region)>("subset_r1p");
+ subset_r1p.insert(subset);
+
+ let killed = Vec::new().into();
+ let region_live_at = iteration2.variable::<((Region, Point), ())>("region_live_at");
+ let cfg_edge_p = iteration2.variable::<(Point, Point)>("cfg_edge_p");
+
+ // .. and then start iterating rules!
+ while iteration2.changed() {
+ requires_rp.from_map(&requires, |&(r, b, p)| ((r, p), b));
+ requires_bp.from_map(&requires, |&(r, b, p)| ((b, p), r));
+
+ // requires(R, B, P) :- borrow_region(R, B, P).
+ // Already loaded; borrow_region is static.
+
+ // requires(R2, B, P) :-
+ // requires(R1, B, P),
+ // subset(R1, R2, P).
+ requires.from_join(&requires_rp, &subset_r1p, |&(_r1, p), &b, &r2| (r2, b, p));
+
+ // requires(R, B, Q) :-
+ // requires(R, B, P),
+ // !killed(B, P),
+ // cfg_edge(P, Q),
+ // (region_live_at(R, Q); universal_region(R)).
+
+ requires_1.from_antijoin(&requires_bp, &killed, |&(b, p), &r| (p, (b, r)));
+ requires_2.from_join(&requires_1, &cfg_edge_p, |&_p, &(b, r), &q| ((r, q), b));
+ requires.from_join(&requires_2, &region_live_at, |&(r, q), &b, &()| (r, b, q));
+ }
+
+ requires.complete()
+ };
+
+ // borrow_live_at(B, P) :- requires(R, B, P), region_live_at(R, P)
+
+ // borrow_live_at(B, P) :- requires(R, B, P), universal_region(R).
+}
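Note that this example seeds every input with an empty relation, so both fixed-point loops converge immediately and the computed `subset` and `requires` relations stay empty. A minimal sketch of how concrete facts could be loaded in the first block instead -- the tuple values below are invented purely for illustration and are not part of the vendored example:

    // Hypothetical facts; in Polonius proper these come from the input fact files.
    // outlives(R1, R2, P) seeds `subset` directly, since rule R0 just copies it.
    subset.insert(vec![(1, 2, 10), (2, 3, 10)].into());
    // region_live_at(R, P), keyed as ((R, P), ()) to match the joins above.
    region_live_at.insert(vec![((1, 11), ()), ((2, 11), ()), ((3, 11), ())].into());
    // cfg_edge(P, Q)
    cfg_edge_p.insert(vec![(10, 11)].into());

With these facts, rule R2 carries the subset edges from point 10 across the CFG edge to point 11 for the regions that remain live there, and rule R1 closes the relation transitively at each point.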
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/graspan1.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/graspan1.rs
new file mode 100644
index 0000000..31225b1
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/examples/graspan1.rs
@@ -0,0 +1,62 @@
+extern crate datafrog;
+use datafrog::Iteration;
+
+fn main() {
+ let timer = ::std::time::Instant::now();
+
+ // Make space for input data.
+ let mut nodes = Vec::new();
+ let mut edges = Vec::new();
+
+ // Read input data from a handy file.
+ use std::fs::File;
+ use std::io::{BufRead, BufReader};
+
+ let filename = std::env::args().nth(1).unwrap();
+ let file = BufReader::new(File::open(filename).unwrap());
+ for readline in file.lines() {
+ let line = readline.expect("read error");
+ if !line.is_empty() && !line.starts_with('#') {
+ let mut elts = line[..].split_whitespace();
+ let src: u32 = elts.next().unwrap().parse().expect("malformed src");
+ let dst: u32 = elts.next().unwrap().parse().expect("malformed dst");
+ let typ: &str = elts.next().unwrap();
+ match typ {
+ "n" => {
+ nodes.push((dst, src));
+ }
+ "e" => {
+ edges.push((src, dst));
+ }
+ unk => panic!("unknown type: {}", unk),
+ }
+ }
+ }
+
+ println!("{:?}\tData loaded", timer.elapsed());
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // .. some variables, ..
+ let variable1 = iteration.variable::<(u32, u32)>("nodes");
+ let variable2 = iteration.variable::<(u32, u32)>("edges");
+
+ // .. load them with some initial values, ..
+ variable1.insert(nodes.into());
+ variable2.insert(edges.into());
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // N(a,c) <- N(a,b), E(b,c)
+ variable1.from_join(&variable1, &variable2, |_b, &a, &c| (c, a));
+ }
+
+ let reachable = variable1.complete();
+
+ println!(
+ "{:?}\tComputation complete (nodes_final: {})",
+ timer.elapsed(),
+ reachable.len()
+ );
+}
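graspan1.rs expects a whitespace-separated fact file as its first argument: a line `src dst n` records N(src, dst) keyed by dst, and `src dst e` records E(src, dst) keyed by src. The same N(a,c) <- N(a,b), E(b,c) rule can be exercised without any input file; a self-contained sketch with invented facts:

    use datafrog::Iteration;

    let mut iteration = Iteration::new();
    // node facts are keyed by `b` so they can join with edges keyed by `b`.
    let nodes = iteration.variable::<(u32, u32)>("nodes");
    let edges = iteration.variable::<(u32, u32)>("edges");

    nodes.insert(vec![(1, 0)].into());          // N(0, 1), stored as (b, a)
    edges.insert(vec![(1, 2), (2, 3)].into());  // E(1, 2), E(2, 3)

    while iteration.changed() {
        // N(a, c) <- N(a, b), E(b, c); the result keeps the (c, a) key order.
        nodes.from_join(&nodes, &edges, |_b, &a, &c| (c, a));
    }

    assert_eq!(nodes.complete().len(), 3);      // N(0,1), N(0,2), N(0,3)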
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/join.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/join.rs
new file mode 100644
index 0000000..94270af
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/join.rs
@@ -0,0 +1,180 @@
+//! Join functionality.
+
+use super::{Relation, Variable};
+use std::cell::Ref;
+use std::ops::Deref;
+
+/// Implements `join`. Note that `input1` must be a variable, but
+/// `input2` can be either a variable or a relation. This is necessary
+/// because relations have no "recent" tuples, so the fn would be a
+/// guaranteed no-op if both arguments were relations. See also
+/// `join_into_relation`.
+pub(crate) fn join_into<'me, Key: Ord, Val1: Ord, Val2: Ord, Result: Ord>(
+ input1: &Variable<(Key, Val1)>,
+ input2: impl JoinInput<'me, (Key, Val2)>,
+ output: &Variable<Result>,
+ mut logic: impl FnMut(&Key, &Val1, &Val2) -> Result,
+) {
+ let mut results = Vec::new();
+
+ let recent1 = input1.recent();
+ let recent2 = input2.recent();
+
+ {
+ // scoped to let `closure` drop borrow of `results`.
+
+ let mut closure = |k: &Key, v1: &Val1, v2: &Val2| results.push(logic(k, v1, v2));
+
+ for batch2 in input2.stable().iter() {
+ join_helper(&recent1, &batch2, &mut closure);
+ }
+
+ for batch1 in input1.stable().iter() {
+ join_helper(&batch1, &recent2, &mut closure);
+ }
+
+ join_helper(&recent1, &recent2, &mut closure);
+ }
+
+ output.insert(Relation::from_vec(results));
+}
+
+/// Join, but for two relations.
+pub(crate) fn join_into_relation<'me, Key: Ord, Val1: Ord, Val2: Ord, Result: Ord>(
+ input1: &Relation<(Key, Val1)>,
+ input2: &Relation<(Key, Val2)>,
+ mut logic: impl FnMut(&Key, &Val1, &Val2) -> Result,
+) -> Relation<Result> {
+ let mut results = Vec::new();
+
+ join_helper(&input1.elements, &input2.elements, |k, v1, v2| {
+ results.push(logic(k, v1, v2));
+ });
+
+ Relation::from_vec(results)
+}
+
+/// Returns a relation of the recent tuples from `input1` whose key is not present in `input2`.
+pub(crate) fn antijoin<'me, Key: Ord, Val: Ord, Result: Ord>(
+ input1: impl JoinInput<'me, (Key, Val)>,
+ input2: &Relation<Key>,
+ mut logic: impl FnMut(&Key, &Val) -> Result,
+) -> Relation<Result> {
+ let mut tuples2 = &input2[..];
+
+ let results = input1
+ .recent()
+ .iter()
+ .filter(|(ref key, _)| {
+ tuples2 = gallop(tuples2, |k| k < key);
+ tuples2.first() != Some(key)
+ })
+ .map(|(ref key, ref val)| logic(key, val))
+ .collect::<Vec<_>>();
+
+ Relation::from_vec(results)
+}
+
+fn join_helper<K: Ord, V1, V2>(
+ mut slice1: &[(K, V1)],
+ mut slice2: &[(K, V2)],
+ mut result: impl FnMut(&K, &V1, &V2),
+) {
+ while !slice1.is_empty() && !slice2.is_empty() {
+ use std::cmp::Ordering;
+
+ // If the keys match produce tuples, else advance the smaller key until they might.
+ match slice1[0].0.cmp(&slice2[0].0) {
+ Ordering::Less => {
+ slice1 = gallop(slice1, |x| x.0 < slice2[0].0);
+ }
+ Ordering::Equal => {
+ // Determine the number of matching keys in each slice.
+ let count1 = slice1.iter().take_while(|x| x.0 == slice1[0].0).count();
+ let count2 = slice2.iter().take_while(|x| x.0 == slice2[0].0).count();
+
+ // Produce results from the cross-product of matches.
+ for index1 in 0..count1 {
+ for s2 in slice2[..count2].iter() {
+ result(&slice1[0].0, &slice1[index1].1, &s2.1);
+ }
+ }
+
+ // Advance slices past this key.
+ slice1 = &slice1[count1..];
+ slice2 = &slice2[count2..];
+ }
+ Ordering::Greater => {
+ slice2 = gallop(slice2, |x| x.0 < slice1[0].0);
+ }
+ }
+ }
+}
+
+pub(crate) fn gallop<T>(mut slice: &[T], mut cmp: impl FnMut(&T) -> bool) -> &[T] {
+ // if empty slice, or already >= element, return
+ if !slice.is_empty() && cmp(&slice[0]) {
+ let mut step = 1;
+ while step < slice.len() && cmp(&slice[step]) {
+ slice = &slice[step..];
+ step <<= 1;
+ }
+
+ step >>= 1;
+ while step > 0 {
+ if step < slice.len() && cmp(&slice[step]) {
+ slice = &slice[step..];
+ }
+ step >>= 1;
+ }
+
+ slice = &slice[1..]; // advance one, as we always stayed < value
+ }
+
+ slice
+}
+
+/// An input that can be used with `from_join`; either a `Variable` or a `Relation`.
+pub trait JoinInput<'me, Tuple: Ord>: Copy {
+ /// If we are on iteration N of the loop, these are the tuples
+ /// added on iteration N-1. (For a `Relation`, this is always an
+ /// empty slice.)
+ type RecentTuples: Deref<Target = [Tuple]>;
+
+ /// If we are on iteration N of the loop, these are the tuples
+ /// added on iteration N - 2 or before. (For a `Relation`, this is
+ /// just `self`.)
+ type StableTuples: Deref<Target = [Relation<Tuple>]>;
+
+ /// Get the set of recent tuples.
+ fn recent(self) -> Self::RecentTuples;
+
+ /// Get the set of stable tuples.
+ fn stable(self) -> Self::StableTuples;
+}
+
+impl<'me, Tuple: Ord> JoinInput<'me, Tuple> for &'me Variable<Tuple> {
+ type RecentTuples = Ref<'me, [Tuple]>;
+ type StableTuples = Ref<'me, [Relation<Tuple>]>;
+
+ fn recent(self) -> Self::RecentTuples {
+ Ref::map(self.recent.borrow(), |r| &r.elements[..])
+ }
+
+ fn stable(self) -> Self::StableTuples {
+ Ref::map(self.stable.borrow(), |v| &v[..])
+ }
+}
+
+impl<'me, Tuple: Ord> JoinInput<'me, Tuple> for &'me Relation<Tuple> {
+ type RecentTuples = &'me [Tuple];
+ type StableTuples = &'me [Relation<Tuple>];
+
+ fn recent(self) -> Self::RecentTuples {
+ &[]
+ }
+
+ fn stable(self) -> Self::StableTuples {
+ std::slice::from_ref(self)
+ }
+}
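The helpers in this module are crate-internal; the public entry points that wrap them are `Variable::from_join`/`from_antijoin` and `Relation::from_join` in lib.rs. A small sketch of the relation-level join (input data invented):

    use datafrog::Relation;

    // Two keyed relations; tuples that share a key are combined by the closure.
    let ages: Relation<(u32, u32)> = vec![(1, 30), (2, 41)].into();
    let cities: Relation<(u32, &str)> = vec![(1, "Oslo"), (3, "Lima")].into();

    // Only key 1 occurs in both inputs, so a single tuple is produced.
    let joined = Relation::from_join(&ages, &cities, |&id, &age, &city| (id, age, city));
    assert_eq!(joined.elements, vec![(1, 30, "Oslo")]);

Internally, `join_helper` walks both sorted slices in lockstep and uses `gallop` (an exponential search) to skip ahead whenever one side's key is far behind the other.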
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/lib.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/lib.rs
new file mode 100644
index 0000000..d2f9323
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/lib.rs
@@ -0,0 +1,567 @@
+//! A lightweight Datalog engine in Rust
+//!
+//! The intended design is that one has static `Relation` types that are sets
+//! of tuples, and `Variable` types that represent monotonically increasing
+//! sets of tuples.
+//!
+//! The types are mostly wrappers around `Vec<Tuple>` indicating sorted-ness,
+//! and the intent is that this code can be dropped in the middle of an otherwise
+//! normal Rust program, run to completion, and then the results extracted as
+//! vectors again.
+
+#![forbid(missing_docs)]
+
+use std::cell::RefCell;
+use std::cmp::Ordering;
+use std::iter::FromIterator;
+use std::rc::Rc;
+
+mod join;
+mod map;
+mod test;
+mod treefrog;
+pub use crate::join::JoinInput;
+pub use crate::treefrog::{
+ extend_anti::ExtendAnti,
+ extend_with::ExtendWith,
+ filter_anti::FilterAnti,
+ filter_with::FilterWith,
+ filters::{PrefixFilter, ValueFilter},
+ Leaper, Leapers, RelationLeaper,
+};
+
+/// A static, ordered list of key-value pairs.
+///
+/// A relation represents a fixed set of key-value pairs. In many places in a
+/// Datalog computation we want to be sure that certain relations are not able
+/// to vary (for example, in antijoins).
+#[derive(Clone)]
+pub struct Relation<Tuple: Ord> {
+ /// Sorted list of distinct tuples.
+ pub elements: Vec<Tuple>,
+}
+
+impl<Tuple: Ord> Relation<Tuple> {
+ /// Merges two relations into their union.
+ pub fn merge(self, other: Self) -> Self {
+ let Relation {
+ elements: mut elements1,
+ } = self;
+ let Relation {
+ elements: mut elements2,
+ } = other;
+
+ // If one of the element lists is zero-length, we don't need to do any work
+ if elements1.is_empty() {
+ return Relation {
+ elements: elements2,
+ };
+ }
+
+ if elements2.is_empty() {
+ return Relation {
+ elements: elements1,
+ };
+ }
+
+ // Make sure that elements1 starts with the lower element
+ // Will not panic since both collections must have at least 1 element at this point
+ if elements1[0] > elements2[0] {
+ std::mem::swap(&mut elements1, &mut elements2);
+ }
+
+ // Fast path for when all the new elements are after the existing ones
+ if elements1[elements1.len() - 1] < elements2[0] {
+ elements1.extend(elements2.into_iter());
+ // println!("fast path");
+ return Relation {
+ elements: elements1,
+ };
+ }
+
+ let mut elements = Vec::with_capacity(elements1.len() + elements2.len());
+ let mut elements1 = elements1.drain(..);
+ let mut elements2 = elements2.drain(..).peekable();
+
+ elements.push(elements1.next().unwrap());
+ if elements.first() == elements2.peek() {
+ elements2.next();
+ }
+
+ for elem in elements1 {
+ while elements2.peek().map(|x| x.cmp(&elem)) == Some(Ordering::Less) {
+ elements.push(elements2.next().unwrap());
+ }
+ if elements2.peek().map(|x| x.cmp(&elem)) == Some(Ordering::Equal) {
+ elements2.next();
+ }
+ elements.push(elem);
+ }
+
+ // Finish draining second list
+ elements.extend(elements2);
+
+ Relation { elements }
+ }
+
+ /// Creates a `Relation` from the elements of the `iterator`.
+ ///
+ /// Same as the `from_iter` method from `std::iter::FromIterator` trait.
+ pub fn from_iter<I>(iterator: I) -> Self
+ where
+ I: IntoIterator<Item = Tuple>,
+ {
+ iterator.into_iter().collect()
+ }
+
+ /// Creates a `Relation` using the `leapjoin` logic;
+ /// see [`Variable::from_leapjoin`]
+ pub fn from_leapjoin<'leap, SourceTuple: Ord, Val: Ord + 'leap>(
+ source: &Relation<SourceTuple>,
+ leapers: impl Leapers<'leap, SourceTuple, Val>,
+ logic: impl FnMut(&SourceTuple, &Val) -> Tuple,
+ ) -> Self {
+ treefrog::leapjoin(&source.elements, leapers, logic)
+ }
+
+ /// Creates a `Relation` by joining the values from `input1` and
+ /// `input2` and then applying `logic`. Like
+ /// [`Variable::from_join`] except for use where the inputs are
+ /// not varying across iterations.
+ pub fn from_join<Key: Ord, Val1: Ord, Val2: Ord>(
+ input1: &Relation<(Key, Val1)>,
+ input2: &Relation<(Key, Val2)>,
+ logic: impl FnMut(&Key, &Val1, &Val2) -> Tuple,
+ ) -> Self {
+ join::join_into_relation(input1, input2, logic)
+ }
+
+ /// Creates a `Relation` by removing all values from `input1` that
+ /// share a key with `input2`, and then transforming the resulting
+ /// tuples with the `logic` closure. Like
+ /// [`Variable::from_antijoin`] except for use where the inputs
+ /// are not varying across iterations.
+ pub fn from_antijoin<Key: Ord, Val1: Ord>(
+ input1: &Relation<(Key, Val1)>,
+ input2: &Relation<Key>,
+ logic: impl FnMut(&Key, &Val1) -> Tuple,
+ ) -> Self {
+ join::antijoin(input1, input2, logic)
+ }
+
+ /// Construct a new relation by mapping another one. Equivalent to
+ /// creating an iterator but perhaps more convenient. Analogous to
+ /// `Variable::from_map`.
+ pub fn from_map<T2: Ord>(input: &Relation<T2>, logic: impl FnMut(&T2) -> Tuple) -> Self {
+ input.iter().map(logic).collect()
+ }
+
+ /// Creates a `Relation` from a vector of tuples.
+ pub fn from_vec(mut elements: Vec<Tuple>) -> Self {
+ elements.sort();
+ elements.dedup();
+ Relation { elements }
+ }
+}
+
+impl<Tuple: Ord> From<Vec<Tuple>> for Relation<Tuple> {
+ fn from(iterator: Vec<Tuple>) -> Self {
+ Self::from_vec(iterator)
+ }
+}
+
+impl<Tuple: Ord> FromIterator<Tuple> for Relation<Tuple> {
+ fn from_iter<I>(iterator: I) -> Self
+ where
+ I: IntoIterator<Item = Tuple>,
+ {
+ Relation::from_vec(iterator.into_iter().collect())
+ }
+}
+
+impl<'tuple, Tuple: 'tuple + Copy + Ord> FromIterator<&'tuple Tuple> for Relation<Tuple> {
+ fn from_iter<I>(iterator: I) -> Self
+ where
+ I: IntoIterator<Item = &'tuple Tuple>,
+ {
+ Relation::from_vec(iterator.into_iter().cloned().collect())
+ }
+}
+
+impl<Tuple: Ord> std::ops::Deref for Relation<Tuple> {
+ type Target = [Tuple];
+ fn deref(&self) -> &Self::Target {
+ &self.elements[..]
+ }
+}
+
+/// An iterative context for recursive evaluation.
+///
+/// An `Iteration` tracks monotonic variables, and monitors their progress.
+/// It can inform the user if they have ceased changing, at which point the
+/// computation should be done.
+pub struct Iteration {
+ variables: Vec<Box<dyn VariableTrait>>,
+}
+
+impl Iteration {
+ /// Create a new iterative context.
+ pub fn new() -> Self {
+ Iteration {
+ variables: Vec::new(),
+ }
+ }
+ /// Reports whether any of the monitored variables have changed since
+ /// the most recent call.
+ pub fn changed(&mut self) -> bool {
+ let mut result = false;
+ for variable in self.variables.iter_mut() {
+ if variable.changed() {
+ result = true;
+ }
+ }
+ result
+ }
+ /// Creates a new named variable associated with the iterative context.
+ pub fn variable<Tuple: Ord + 'static>(&mut self, name: &str) -> Variable<Tuple> {
+ let variable = Variable::new(name);
+ self.variables.push(Box::new(variable.clone()));
+ variable
+ }
+ /// Creates a new named variable associated with the iterative context.
+ ///
+ /// This variable will not be maintained distinctly, and may advertise tuples as
+ /// recent multiple times (perhaps unboundedly many times).
+ pub fn variable_indistinct<Tuple: Ord + 'static>(&mut self, name: &str) -> Variable<Tuple> {
+ let mut variable = Variable::new(name);
+ variable.distinct = false;
+ self.variables.push(Box::new(variable.clone()));
+ variable
+ }
+}
+
+/// A type that can report on whether it has changed.
+trait VariableTrait {
+ /// Reports whether the variable has changed since it was last asked.
+ fn changed(&mut self) -> bool;
+}
+
+/// A monotonically increasing set of `Tuple`s.
+///
+/// There are three stages in the lifecycle of a tuple:
+///
+/// 1. A tuple is added to `self.to_add`, but is not yet visible externally.
+/// 2. Newly added tuples are then promoted to `self.recent` for one iteration.
+/// 3. After one iteration, recent tuples are moved to `self.tuples` for posterity.
+///
+/// Each time `self.changed()` is called, the `recent` relation is folded into `tuples`,
+/// and the `to_add` relations are merged, potentially deduplicated against `tuples`, and
+/// then made `recent`. This way, across calls to `changed()` all added tuples are in
+/// `recent` at least once and eventually all are in `tuples`.
+///
+/// A `Variable` may optionally be instructed not to de-duplicate its tuples, for reasons
+/// of performance. Such a variable cannot be relied on to terminate iterative computation,
+/// and it is important that any cycle of derivations have at least one de-duplicating
+/// variable on it.
+pub struct Variable<Tuple: Ord> {
+ /// Should the variable be maintained distinctly.
+ distinct: bool,
+ /// A useful name for the variable.
+ name: String,
+ /// A list of relations whose union are the accepted tuples.
+ pub stable: Rc<RefCell<Vec<Relation<Tuple>>>>,
+ /// A list of recent tuples, still to be processed.
+ pub recent: Rc<RefCell<Relation<Tuple>>>,
+ /// A list of future tuples, to be introduced.
+ to_add: Rc<RefCell<Vec<Relation<Tuple>>>>,
+}
+
+// Operator implementations.
+impl<Tuple: Ord> Variable<Tuple> {
+ /// Adds tuples that result from joining `input1` and `input2` --
+ /// each of the inputs must be a set of (Key, Value) tuples. Both
+ /// `input1` and `input2` must have the same type of key (`K`) but
+ /// they can have distinct value types (`V1` and `V2`
+ /// respectively). The `logic` closure will be invoked for each
+ /// key that appears in both inputs; it is also given the two
+ /// values, and from those it should construct the resulting
+ /// value.
+ ///
+ /// Note that `input1` must be a variable, but `input2` can be a
+ /// relation or a variable. Therefore, you cannot join two
+ /// relations with this method. This is not because the result
+ /// would be wrong, but because it would be inefficient: the
+ /// result from such a join cannot vary across iterations (as
+ /// relations are fixed), so you should prefer to invoke `insert`
+ /// on a relation created by `Relation::from_join` instead.
+ ///
+ /// # Examples
+ ///
+ /// This example starts a collection with the pairs (x, x+1) and (x+1, x) for x in 0 .. 10.
+ /// It then adds pairs (y, z) for which (x, y) and (x, z) are present. Because the initial
+ /// pairs are symmetric, this should result in all pairs (x, y) for x and y in 0 .. 11.
+ ///
+ /// ```
+ /// use datafrog::{Iteration, Relation};
+ ///
+ /// let mut iteration = Iteration::new();
+ /// let variable = iteration.variable::<(usize, usize)>("source");
+ /// variable.extend((0 .. 10).map(|x| (x, x + 1)));
+ /// variable.extend((0 .. 10).map(|x| (x + 1, x)));
+ ///
+ /// while iteration.changed() {
+ /// variable.from_join(&variable, &variable, |&key, &val1, &val2| (val1, val2));
+ /// }
+ ///
+ /// let result = variable.complete();
+ /// assert_eq!(result.len(), 121);
+ /// ```
+ pub fn from_join<'me, K: Ord, V1: Ord, V2: Ord>(
+ &self,
+ input1: &'me Variable<(K, V1)>,
+ input2: impl JoinInput<'me, (K, V2)>,
+ logic: impl FnMut(&K, &V1, &V2) -> Tuple,
+ ) {
+ join::join_into(input1, input2, self, logic)
+ }
+
+ /// Adds tuples from `input1` whose key is not present in `input2`.
+ ///
+ /// Note that `input1` must be a variable: if you have a relation
+ /// instead, you can use `Relation::from_antijoin` and then
+ /// `Variable::insert`. Note that the result will not vary during
+ /// the iteration.
+ ///
+ /// # Examples
+ ///
+ /// This example starts a collection with the pairs (x, x+1) for x in 0 .. 10. It then
+ /// adds any pairs (x+1,x) for which x is not a multiple of three. That excludes four
+ /// pairs (for 0, 3, 6, and 9) which should leave us with 16 total pairs.
+ ///
+ /// ```
+ /// use datafrog::{Iteration, Relation};
+ ///
+ /// let mut iteration = Iteration::new();
+ /// let variable = iteration.variable::<(usize, usize)>("source");
+ /// variable.extend((0 .. 10).map(|x| (x, x + 1)));
+ ///
+ /// let relation: Relation<_> = (0 .. 10).filter(|x| x % 3 == 0).collect();
+ ///
+ /// while iteration.changed() {
+ /// variable.from_antijoin(&variable, &relation, |&key, &val| (val, key));
+ /// }
+ ///
+ /// let result = variable.complete();
+ /// assert_eq!(result.len(), 16);
+ /// ```
+ pub fn from_antijoin<K: Ord, V: Ord>(
+ &self,
+ input1: &Variable<(K, V)>,
+ input2: &Relation<K>,
+ logic: impl FnMut(&K, &V) -> Tuple,
+ ) {
+ self.insert(join::antijoin(input1, input2, logic))
+ }
+
+ /// Adds tuples that result from mapping `input`.
+ ///
+ /// # Examples
+ ///
+ /// This example starts a collection with the pairs (x, x) for x in 0 .. 10. It then
+ /// repeatedly adds any pairs (x, z) for (x, y) in the collection, where z is the Collatz
+ /// step for y: it is y/2 if y is even, and 3*y + 1 if y is odd. This produces all of the
+ /// pairs (x, y) where x visits y as part of its Collatz journey.
+ ///
+ /// ```
+ /// use datafrog::{Iteration, Relation};
+ ///
+ /// let mut iteration = Iteration::new();
+ /// let variable = iteration.variable::<(usize, usize)>("source");
+ /// variable.extend((0 .. 10).map(|x| (x, x)));
+ ///
+ /// while iteration.changed() {
+ /// variable.from_map(&variable, |&(key, val)|
+ /// if val % 2 == 0 {
+ /// (key, val/2)
+ /// }
+ /// else {
+ /// (key, 3*val + 1)
+ /// });
+ /// }
+ ///
+ /// let result = variable.complete();
+ /// assert_eq!(result.len(), 74);
+ /// ```
+ pub fn from_map<T2: Ord>(&self, input: &Variable<T2>, logic: impl FnMut(&T2) -> Tuple) {
+ map::map_into(input, self, logic)
+ }
+
+ /// Adds tuples that result from combining `source` with the
+ /// relations given in `leapers`. This operation is very flexible
+ /// and can be used to do a combination of joins and anti-joins.
+ /// The main limitation is that the things being combined must
+ /// consist of one dynamic variable (`source`) and then several
+ /// fixed relations (`leapers`).
+ ///
+ /// The idea is as follows:
+ ///
+ /// - You will be inserting new tuples that result from joining (and anti-joining)
+ /// some dynamic variable `source` of source tuples (`SourceTuple`)
+ /// with some set of values (of type `Val`).
+ /// - You provide these values by combining `source` with a set of leapers
+ /// `leapers`, each of which is derived from a fixed relation. The `leapers`
+ /// should be either a single leaper (of suitable type) or else a tuple of leapers.
+ /// You can create a leaper in one of two ways:
+ /// - Extension: In this case, you have a relation of type `(K, Val)` for some
+ /// type `K`. You provide a closure that maps from `SourceTuple` to the key
+ /// `K`. If you use `relation.extend_with`, then any `Val` values the
+ /// relation provides will be added to the set of values; if you use
+ /// `extend_anti`, then the `Val` values will be removed.
+ /// - Filtering: In this case, you have a relation of type `K` for some
+ /// type `K` and you provide a closure that maps from `SourceTuple` to
+ /// the key `K`. Filters don't provide values but they remove source
+ /// tuples.
+ /// - Finally, you get a callback `logic` that accepts each `(SourceTuple, Val)`
+ /// that was successfully joined (and not filtered) and which maps to the
+ /// type of this variable.
+ pub fn from_leapjoin<'leap, SourceTuple: Ord, Val: Ord + 'leap>(
+ &self,
+ source: &Variable<SourceTuple>,
+ leapers: impl Leapers<'leap, SourceTuple, Val>,
+ logic: impl FnMut(&SourceTuple, &Val) -> Tuple,
+ ) {
+ self.insert(treefrog::leapjoin(&source.recent.borrow(), leapers, logic));
+ }
+}
+
+impl<Tuple: Ord> Clone for Variable<Tuple> {
+ fn clone(&self) -> Self {
+ Variable {
+ distinct: self.distinct,
+ name: self.name.clone(),
+ stable: self.stable.clone(),
+ recent: self.recent.clone(),
+ to_add: self.to_add.clone(),
+ }
+ }
+}
+
+impl<Tuple: Ord> Variable<Tuple> {
+ fn new(name: &str) -> Self {
+ Variable {
+ distinct: true,
+ name: name.to_string(),
+ stable: Rc::new(RefCell::new(Vec::new())),
+ recent: Rc::new(RefCell::new(Vec::new().into())),
+ to_add: Rc::new(RefCell::new(Vec::new())),
+ }
+ }
+
+ /// Inserts a relation into the variable.
+ ///
+ /// This is most commonly used to load initial values into a variable.
+ /// It is not obvious that it should be commonly used otherwise, but
+ /// it should not be harmful.
+ pub fn insert(&self, relation: Relation<Tuple>) {
+ if !relation.is_empty() {
+ self.to_add.borrow_mut().push(relation);
+ }
+ }
+
+ /// Extend the variable with values from the iterator.
+ ///
+ /// This is most commonly used to load initial values into a variable.
+ /// It is not obvious that it should be commonly used otherwise, but
+ /// it should not be harmful.
+ pub fn extend<T>(&self, iterator: impl IntoIterator<Item = T>)
+ where
+ Relation<Tuple>: FromIterator<T>,
+ {
+ self.insert(iterator.into_iter().collect());
+ }
+
+ /// Consumes the variable and returns a relation.
+ ///
+ /// This method removes the ability for the variable to develop, and
+ /// flattens all internal tuples down to one relation. The method
+ /// asserts that iteration has completed, in that `self.recent` and
+ /// `self.to_add` should both be empty.
+ pub fn complete(self) -> Relation<Tuple> {
+ assert!(self.recent.borrow().is_empty());
+ assert!(self.to_add.borrow().is_empty());
+ let mut result: Relation<Tuple> = Vec::new().into();
+ while let Some(batch) = self.stable.borrow_mut().pop() {
+ result = result.merge(batch);
+ }
+ result
+ }
+}
+
+impl<Tuple: Ord> VariableTrait for Variable<Tuple> {
+ fn changed(&mut self) -> bool {
+ // 1. Merge self.recent into self.stable.
+ if !self.recent.borrow().is_empty() {
+ let mut recent =
+ ::std::mem::replace(&mut (*self.recent.borrow_mut()), Vec::new().into());
+ while self
+ .stable
+ .borrow()
+ .last()
+ .map(|x| x.len() <= 2 * recent.len())
+ == Some(true)
+ {
+ let last = self.stable.borrow_mut().pop().unwrap();
+ recent = recent.merge(last);
+ }
+ self.stable.borrow_mut().push(recent);
+ }
+
+ // 2. Move self.to_add into self.recent.
+ let to_add = self.to_add.borrow_mut().pop();
+ if let Some(mut to_add) = to_add {
+ while let Some(to_add_more) = self.to_add.borrow_mut().pop() {
+ to_add = to_add.merge(to_add_more);
+ }
+ // 2b. Restrict `to_add` to tuples not in `self.stable`.
+ if self.distinct {
+ for batch in self.stable.borrow().iter() {
+ let mut slice = &batch[..];
+ // Only gallop if the slice is relatively large.
+ if slice.len() > 4 * to_add.elements.len() {
+ to_add.elements.retain(|x| {
+ slice = join::gallop(slice, |y| y < x);
+ slice.is_empty() || &slice[0] != x
+ });
+ } else {
+ to_add.elements.retain(|x| {
+ while !slice.is_empty() && &slice[0] < x {
+ slice = &slice[1..];
+ }
+ slice.is_empty() || &slice[0] != x
+ });
+ }
+ }
+ }
+ *self.recent.borrow_mut() = to_add;
+ }
+
+ // let mut total = 0;
+ // for tuple in self.stable.borrow().iter() {
+ // total += tuple.len();
+ // }
+
+ // println!("Variable\t{}\t{}\t{}", self.name, total, self.recent.borrow().len());
+
+ !self.recent.borrow().is_empty()
+ }
+}
+
+// impl<Tuple: Ord> Drop for Variable<Tuple> {
+// fn drop(&mut self) {
+// let mut total = 0;
+// for batch in self.stable.borrow().iter() {
+// total += batch.len();
+// }
+// println!("FINAL: {:?}\t{:?}", self.name, total);
+// }
+// }
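The `to_add` -> `recent` -> `stable` staging described on `Variable` is what makes the `while iteration.changed()` loops semi-naive: each rule application only sees the previous round's additions as `recent`. A small sketch that makes the staging visible (names and values invented):

    use datafrog::Iteration;

    let mut iteration = Iteration::new();
    let var = iteration.variable::<u32>("staged");

    var.extend(vec![1, 2, 3]);      // lands in `to_add`; not visible to joins yet
    assert!(iteration.changed());   // to_add is promoted to `recent` -> still changing
    assert!(!iteration.changed());  // recent folds into `stable`, nothing new -> done
    assert_eq!(var.complete().elements, vec![1, 2, 3]);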
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/map.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/map.rs
new file mode 100644
index 0000000..1a8c101
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/map.rs
@@ -0,0 +1,13 @@
+//! Map functionality.
+
+use super::{Relation, Variable};
+
+pub(crate) fn map_into<T1: Ord, T2: Ord>(
+ input: &Variable<T1>,
+ output: &Variable<T2>,
+ logic: impl FnMut(&T1) -> T2,
+) {
+ let results: Vec<T2> = input.recent.borrow().iter().map(logic).collect();
+
+ output.insert(Relation::from_vec(results));
+}
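`map_into` reads only the input's `recent` tuples, which is why `Variable::from_map` is meant to run inside the `changed()` loop. For a one-shot transformation of fixed data, `Relation::from_map` in lib.rs is the non-iterative counterpart; a tiny sketch (data invented):

    use datafrog::Relation;

    let edges: Relation<(u32, u32)> = vec![(0, 1), (1, 2)].into();
    // Reverse every edge without any Iteration machinery.
    let reversed = Relation::from_map(&edges, |&(src, dst)| (dst, src));
    assert_eq!(reversed.elements, vec![(1, 0), (2, 1)]);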
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/test.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/test.rs
new file mode 100644
index 0000000..9d5af35
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/test.rs
@@ -0,0 +1,195 @@
+#![cfg(test)]
+
+use crate::Iteration;
+use crate::Relation;
+use crate::RelationLeaper;
+use proptest::prelude::*;
+use proptest::{proptest, proptest_helper};
+
+fn inputs() -> impl Strategy<Value = Vec<(u32, u32)>> {
+ prop::collection::vec((0_u32..100, 0_u32..100), 1..500)
+}
+
+/// The original way to use datafrog -- computes reachable nodes from a set of edges
+fn reachable_with_var_join(edges: &[(u32, u32)]) -> Relation<(u32, u32)> {
+ let edges: Relation<_> = edges.iter().collect();
+ let mut iteration = Iteration::new();
+
+ let edges_by_successor = iteration.variable::<(u32, u32)>("edges_invert");
+ edges_by_successor.extend(edges.iter().map(|&(n1, n2)| (n2, n1)));
+
+ let reachable = iteration.variable::<(u32, u32)>("reachable");
+ reachable.insert(edges);
+
+ while iteration.changed() {
+ // reachable(N1, N3) :- edges(N1, N2), reachable(N2, N3).
+ reachable.from_join(&reachable, &edges_by_successor, |&_, &n3, &n1| (n1, n3));
+ }
+
+ reachable.complete()
+}
+
+/// Like `reachable`, but using a relation as an input to `from_join`
+fn reachable_with_relation_join(edges: &[(u32, u32)]) -> Relation<(u32, u32)> {
+ let edges: Relation<_> = edges.iter().collect();
+ let mut iteration = Iteration::new();
+
+ // NB. Changed from `reachable_with_var_join`:
+ let edges_by_successor: Relation<_> = edges.iter().map(|&(n1, n2)| (n2, n1)).collect();
+
+ let reachable = iteration.variable::<(u32, u32)>("reachable");
+ reachable.insert(edges);
+
+ while iteration.changed() {
+ // reachable(N1, N3) :- edges(N1, N2), reachable(N2, N3).
+ reachable.from_join(&reachable, &edges_by_successor, |&_, &n3, &n1| (n1, n3));
+ }
+
+ reachable.complete()
+}
+
+fn reachable_with_leapfrog(edges: &[(u32, u32)]) -> Relation<(u32, u32)> {
+ let edges: Relation<_> = edges.iter().collect();
+ let mut iteration = Iteration::new();
+
+ let edges_by_successor: Relation<_> = edges.iter().map(|&(n1, n2)| (n2, n1)).collect();
+
+ let reachable = iteration.variable::<(u32, u32)>("reachable");
+ reachable.insert(edges);
+
+ while iteration.changed() {
+ // reachable(N1, N3) :- edges(N1, N2), reachable(N2, N3).
+ reachable.from_leapjoin(
+ &reachable,
+ edges_by_successor.extend_with(|&(n2, _)| n2),
+ |&(_, n3), &n1| (n1, n3),
+ );
+ }
+
+ reachable.complete()
+}
+
+/// Computes a join where the values are summed -- uses iteration
+/// variables (the original datafrog technique).
+fn sum_join_via_var(
+ input1_slice: &[(u32, u32)],
+ input2_slice: &[(u32, u32)],
+) -> Relation<(u32, u32)> {
+ let mut iteration = Iteration::new();
+
+ let input1 = iteration.variable::<(u32, u32)>("input1");
+ input1.extend(input1_slice);
+
+ let input2 = iteration.variable::<(u32, u32)>("input2");
+ input2.extend(input2_slice);
+
+ let output = iteration.variable::<(u32, u32)>("output");
+
+ while iteration.changed() {
+ // output(K1, V1 * 100 + V2) :- input1(K1, V1), input2(K1, V2).
+ output.from_join(&input1, &input2, |&k1, &v1, &v2| (k1, v1 * 100 + v2));
+ }
+
+ output.complete()
+}
+
+/// Computes the same join as `sum_join_via_var`, but evaluated directly
+/// on static `Relation`s via `Relation::from_join` (no iteration variables).
+fn sum_join_via_relation(
+ input1_slice: &[(u32, u32)],
+ input2_slice: &[(u32, u32)],
+) -> Relation<(u32, u32)> {
+ let input1: Relation<_> = input1_slice.iter().collect();
+ let input2: Relation<_> = input2_slice.iter().collect();
+ Relation::from_join(&input1, &input2, |&k1, &v1, &v2| (k1, v1 * 100 + v2))
+}
+
+proptest! {
+ #[test]
+ fn reachable_leapfrog_vs_var_join(edges in inputs()) {
+ let reachable1 = reachable_with_var_join(&edges);
+ let reachable2 = reachable_with_leapfrog(&edges);
+ assert_eq!(reachable1.elements, reachable2.elements);
+ }
+
+ #[test]
+ fn reachable_rel_join_vs_var_join(edges in inputs()) {
+ let reachable1 = reachable_with_var_join(&edges);
+ let reachable2 = reachable_with_relation_join(&edges);
+ assert_eq!(reachable1.elements, reachable2.elements);
+ }
+
+ #[test]
+ fn sum_join_from_var_vs_rel((set1, set2) in (inputs(), inputs())) {
+ let output1 = sum_join_via_var(&set1, &set2);
+ let output2 = sum_join_via_relation(&set1, &set2);
+ assert_eq!(output1.elements, output2.elements);
+ }
+
+ /// Test the behavior of `filter_with` used on its own in a
+ /// leapjoin -- effectively it becomes an "intersection"
+ /// operation.
+ #[test]
+ fn filter_with_on_its_own((set1, set2) in (inputs(), inputs())) {
+ let input1: Relation<(u32, u32)> = set1.iter().collect();
+ let input2: Relation<(u32, u32)> = set2.iter().collect();
+ let intersection1 = Relation::from_leapjoin(
+ &input1,
+ input2.filter_with(|&tuple| tuple),
+ |&tuple, &()| tuple,
+ );
+
+ let intersection2: Relation<(u32, u32)> = input1.elements.iter()
+ .filter(|t| input2.elements.binary_search(&t).is_ok())
+ .collect();
+
+ assert_eq!(intersection1.elements, intersection2.elements);
+ }
+
+ /// Test the behavior of `filter_anti` used on its own in a
+ /// leapjoin -- effectively it becomes a "set minus" operation.
+ #[test]
+ fn filter_anti_on_its_own((set1, set2) in (inputs(), inputs())) {
+ let input1: Relation<(u32, u32)> = set1.iter().collect();
+ let input2: Relation<(u32, u32)> = set2.iter().collect();
+
+ let difference1 = Relation::from_leapjoin(
+ &input1,
+ input2.filter_anti(|&tuple| tuple),
+ |&tuple, &()| tuple,
+ );
+
+ let difference2: Relation<(u32, u32)> = input1.elements.iter()
+ .filter(|t| input2.elements.binary_search(&t).is_err())
+ .collect();
+
+ assert_eq!(difference1.elements, difference2.elements);
+ }
+}
+
+/// Test that `from_leapjoin` matches against the tuples from an
+/// `extend` that precedes first iteration.
+///
+/// This was always true, but wasn't immediately obvious to me until I
+/// re-read the code more carefully. -nikomatsakis
+#[test]
+fn leapjoin_from_extend() {
+ let doubles: Relation<(u32, u32)> = (0..10).map(|i| (i, i * 2)).collect();
+
+ let mut iteration = Iteration::new();
+
+ let variable = iteration.variable::<(u32, u32)>("variable");
+ variable.extend(Some((2, 2)));
+
+ while iteration.changed() {
+ variable.from_leapjoin(
+ &variable,
+ doubles.extend_with(|&(i, _)| i),
+ |&(i, _), &j| (i, j),
+ );
+ }
+
+ let variable = variable.complete();
+
+ assert_eq!(variable.elements, vec![(2, 2), (2, 4)]);
+}
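For a concrete instance of the `output(K1, V1 * 100 + V2)` rule that the `sum_join_*` tests compare, the relation-level form can be evaluated directly (inputs invented):

    use datafrog::Relation;

    let input1: Relation<(u32, u32)> = vec![(7, 1), (8, 2)].into();
    let input2: Relation<(u32, u32)> = vec![(7, 3), (9, 4)].into();
    let output = Relation::from_join(&input1, &input2, |&k, &v1, &v2| (k, v1 * 100 + v2));
    assert_eq!(output.elements, vec![(7, 103)]); // only key 7 is shared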
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/treefrog.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/treefrog.rs
new file mode 100644
index 0000000..2ad238f
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/datafrog/src/treefrog.rs
@@ -0,0 +1,661 @@
+//! Treefrog leapjoin functionality.
+
+use super::Relation;
+
+/// Performs treefrog leapjoin using a list of leapers.
+pub(crate) fn leapjoin<'leap, Tuple: Ord, Val: Ord + 'leap, Result: Ord>(
+ source: &[Tuple],
+ mut leapers: impl Leapers<'leap, Tuple, Val>,
+ mut logic: impl FnMut(&Tuple, &Val) -> Result,
+) -> Relation<Result> {
+ let mut result = Vec::new(); // temp output storage.
+ let mut values = Vec::new(); // temp value storage.
+
+ for tuple in source {
+ // Determine which leaper would propose the fewest values.
+ let mut min_index = usize::max_value();
+ let mut min_count = usize::max_value();
+ leapers.for_each_count(tuple, |index, count| {
+ if min_count > count {
+ min_count = count;
+ min_index = index;
+ }
+ });
+
+ // We had best have at least one relation restricting values.
+ assert!(min_count < usize::max_value());
+
+ // If there are values to propose:
+ if min_count > 0 {
+ // Push the values that `min_index` "proposes" into `values`.
+ leapers.propose(tuple, min_index, &mut values);
+
+ // Give other leapers a chance to remove values from
+ // anti-joins or filters.
+ leapers.intersect(tuple, min_index, &mut values);
+
+ // Push remaining items into result.
+ for val in values.drain(..) {
+ result.push(logic(tuple, val));
+ }
+ }
+ }
+
+ Relation::from_vec(result)
+}
+
+/// Implemented for a tuple of leapers
+pub trait Leapers<'leap, Tuple, Val> {
+ /// Internal method:
+ fn for_each_count(&mut self, tuple: &Tuple, op: impl FnMut(usize, usize));
+
+ /// Internal method:
+ fn propose(&mut self, tuple: &Tuple, min_index: usize, values: &mut Vec<&'leap Val>);
+
+ /// Internal method:
+ fn intersect(&mut self, tuple: &Tuple, min_index: usize, values: &mut Vec<&'leap Val>);
+}
+
+macro_rules! tuple_leapers {
+ ($($Ty:ident)*) => {
+ #[allow(unused_assignments, non_snake_case)]
+ impl<'leap, Tuple, Val, $($Ty),*> Leapers<'leap, Tuple, Val> for ($($Ty,)*)
+ where
+ $($Ty: Leaper<'leap, Tuple, Val>,)*
+ {
+ fn for_each_count(&mut self, tuple: &Tuple, mut op: impl FnMut(usize, usize)) {
+ let ($($Ty,)*) = self;
+ let mut index = 0;
+ $(
+ let count = $Ty.count(tuple);
+ op(index, count);
+ index += 1;
+ )*
+ }
+
+ fn propose(&mut self, tuple: &Tuple, min_index: usize, values: &mut Vec<&'leap Val>) {
+ let ($($Ty,)*) = self;
+ let mut index = 0;
+ $(
+ if min_index == index {
+ return $Ty.propose(tuple, values);
+ }
+ index += 1;
+ )*
+ panic!("no match found for min_index={}", min_index);
+ }
+
+ fn intersect(&mut self, tuple: &Tuple, min_index: usize, values: &mut Vec<&'leap Val>) {
+ let ($($Ty,)*) = self;
+ let mut index = 0;
+ $(
+ if min_index != index {
+ $Ty.intersect(tuple, values);
+ }
+ index += 1;
+ )*
+ }
+ }
+ }
+}
+
+tuple_leapers!(A B);
+tuple_leapers!(A B C);
+tuple_leapers!(A B C D);
+tuple_leapers!(A B C D E);
+tuple_leapers!(A B C D E F);
+tuple_leapers!(A B C D E F G);
+
+/// Methods to support treefrog leapjoin.
+pub trait Leaper<'leap, Tuple, Val> {
+ /// Estimates the number of proposed values.
+ fn count(&mut self, prefix: &Tuple) -> usize;
+ /// Populates `values` with proposed values.
+ fn propose(&mut self, prefix: &Tuple, values: &mut Vec<&'leap Val>);
+ /// Restricts `values` to proposed values.
+ fn intersect(&mut self, prefix: &Tuple, values: &mut Vec<&'leap Val>);
+}
+
+pub(crate) mod filters {
+ use super::Leaper;
+ use super::Leapers;
+
+ /// A treefrog leaper that tests each of the tuples from the main
+ /// input (the "prefix"). Use like `PrefixFilter::from(|tuple|
+ /// ...)`; if the closure returns true, then the tuple is
+ /// retained, else it will be ignored. This leaper can be used in
+ /// isolation in which case it just acts like a filter on the
+ /// input (the "proposed value" will be `()` type).
+ pub struct PrefixFilter<Tuple, Func: Fn(&Tuple) -> bool> {
+ phantom: ::std::marker::PhantomData<Tuple>,
+ predicate: Func,
+ }
+
+ impl<'leap, Tuple, Func> PrefixFilter<Tuple, Func>
+ where
+ Func: Fn(&Tuple) -> bool,
+ {
+ /// Creates a new filter based on the prefix
+ pub fn from(predicate: Func) -> Self {
+ PrefixFilter {
+ phantom: ::std::marker::PhantomData,
+ predicate,
+ }
+ }
+ }
+
+ impl<'leap, Tuple, Val, Func> Leaper<'leap, Tuple, Val> for PrefixFilter<Tuple, Func>
+ where
+ Func: Fn(&Tuple) -> bool,
+ {
+ /// Estimates the number of proposed values.
+ fn count(&mut self, prefix: &Tuple) -> usize {
+ if (self.predicate)(prefix) {
+ usize::max_value()
+ } else {
+ 0
+ }
+ }
+ /// Populates `values` with proposed values.
+ fn propose(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val>) {
+ panic!("PrefixFilter::propose(): variable apparently unbound");
+ }
+ /// Restricts `values` to proposed values.
+ fn intersect(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val>) {
+ // We can only be here if we returned max_value() above.
+ }
+ }
+
+ impl<'leap, Tuple, Func> Leapers<'leap, Tuple, ()> for PrefixFilter<Tuple, Func>
+ where
+ Func: Fn(&Tuple) -> bool,
+ {
+ fn for_each_count(&mut self, tuple: &Tuple, mut op: impl FnMut(usize, usize)) {
+ if <Self as Leaper<'_, Tuple, ()>>::count(self, tuple) == 0 {
+ op(0, 0)
+ } else {
+ // we will "propose" the `()` value if the predicate applies
+ op(0, 1)
+ }
+ }
+
+ fn propose(&mut self, _: &Tuple, min_index: usize, values: &mut Vec<&'leap ()>) {
+ assert_eq!(min_index, 0);
+ values.push(&());
+ }
+
+ fn intersect(&mut self, _: &Tuple, min_index: usize, values: &mut Vec<&'leap ()>) {
+ assert_eq!(min_index, 0);
+ assert_eq!(values.len(), 1);
+ }
+ }
+
+ /// A treefrog leaper based on a predicate of prefix and value.
+ /// Use like `ValueFilter::from(|tuple, value| ...)`. The closure
+ /// should return true if `value` ought to be retained. The
+ /// `value` will be a value proposed elsewhere by an `extend_with`
+ /// leaper.
+ ///
+ /// This leaper cannot be used in isolation, it must be combined
+ /// with other leapers.
+ pub struct ValueFilter<Tuple, Val, Func: Fn(&Tuple, &Val) -> bool> {
+ phantom: ::std::marker::PhantomData<(Tuple, Val)>,
+ predicate: Func,
+ }
+
+ impl<'leap, Tuple, Val, Func> ValueFilter<Tuple, Val, Func>
+ where
+ Func: Fn(&Tuple, &Val) -> bool,
+ {
+ /// Creates a new filter based on the prefix and the proposed value
+ pub fn from(predicate: Func) -> Self {
+ ValueFilter {
+ phantom: ::std::marker::PhantomData,
+ predicate,
+ }
+ }
+ }
+
+ impl<'leap, Tuple, Val, Func> Leaper<'leap, Tuple, Val> for ValueFilter<Tuple, Val, Func>
+ where
+ Func: Fn(&Tuple, &Val) -> bool,
+ {
+ /// Estimates the number of proposed values.
+ fn count(&mut self, _prefix: &Tuple) -> usize {
+ usize::max_value()
+ }
+ /// Populates `values` with proposed values.
+ fn propose(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val>) {
+ panic!("ValueFilter::propose(): variable apparently unbound");
+ }
+ /// Restricts `values` to proposed values.
+ fn intersect(&mut self, prefix: &Tuple, values: &mut Vec<&'leap Val>) {
+ values.retain(|val| (self.predicate)(prefix, val));
+ }
+ }
+
+}
+
+/// Extension method for relations.
+pub trait RelationLeaper<Key: Ord, Val: Ord> {
+ /// Extend with `Val` using the elements of the relation.
+ fn extend_with<'leap, Tuple: Ord, Func: Fn(&Tuple) -> Key>(
+ &'leap self,
+ key_func: Func,
+ ) -> extend_with::ExtendWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap;
+ /// Extend with `Val` using the complement of the relation.
+ fn extend_anti<'leap, Tuple: Ord, Func: Fn(&Tuple) -> Key>(
+ &'leap self,
+ key_func: Func,
+ ) -> extend_anti::ExtendAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap;
+ /// Extend with any value if tuple is present in relation.
+ fn filter_with<'leap, Tuple: Ord, Func: Fn(&Tuple) -> (Key, Val)>(
+ &'leap self,
+ key_func: Func,
+ ) -> filter_with::FilterWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap;
+ /// Extend with any value if tuple is absent from relation.
+ fn filter_anti<'leap, Tuple: Ord, Func: Fn(&Tuple) -> (Key, Val)>(
+ &'leap self,
+ key_func: Func,
+ ) -> filter_anti::FilterAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap;
+}
+
+impl<Key: Ord, Val: Ord> RelationLeaper<Key, Val> for Relation<(Key, Val)> {
+ fn extend_with<'leap, Tuple: Ord, Func: Fn(&Tuple) -> Key>(
+ &'leap self,
+ key_func: Func,
+ ) -> extend_with::ExtendWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap,
+ {
+ extend_with::ExtendWith::from(self, key_func)
+ }
+ fn extend_anti<'leap, Tuple: Ord, Func: Fn(&Tuple) -> Key>(
+ &'leap self,
+ key_func: Func,
+ ) -> extend_anti::ExtendAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap,
+ {
+ extend_anti::ExtendAnti::from(self, key_func)
+ }
+ fn filter_with<'leap, Tuple: Ord, Func: Fn(&Tuple) -> (Key, Val)>(
+ &'leap self,
+ key_func: Func,
+ ) -> filter_with::FilterWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap,
+ {
+ filter_with::FilterWith::from(self, key_func)
+ }
+ fn filter_anti<'leap, Tuple: Ord, Func: Fn(&Tuple) -> (Key, Val)>(
+ &'leap self,
+ key_func: Func,
+ ) -> filter_anti::FilterAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: 'leap,
+ Val: 'leap,
+ {
+ filter_anti::FilterAnti::from(self, key_func)
+ }
+}
+
+pub(crate) mod extend_with {
+ use super::{binary_search, Leaper, Leapers, Relation};
+ use crate::join::gallop;
+
+ /// Wraps a Relation<Tuple> as a leaper.
+ pub struct ExtendWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ relation: &'leap Relation<(Key, Val)>,
+ start: usize,
+ end: usize,
+ key_func: Func,
+ phantom: ::std::marker::PhantomData<Tuple>,
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> ExtendWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ /// Constructs an ExtendWith from a relation and a key function.
+ pub fn from(relation: &'leap Relation<(Key, Val)>, key_func: Func) -> Self {
+ ExtendWith {
+ relation,
+ start: 0,
+ end: 0,
+ key_func,
+ phantom: ::std::marker::PhantomData,
+ }
+ }
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> Leaper<'leap, Tuple, Val>
+ for ExtendWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ fn count(&mut self, prefix: &Tuple) -> usize {
+ let key = (self.key_func)(prefix);
+ self.start = binary_search(&self.relation[..], |x| &x.0 < &key);
+ let slice1 = &self.relation[self.start..];
+ let slice2 = gallop(slice1, |x| &x.0 <= &key);
+ self.end = self.relation.len() - slice2.len();
+ slice1.len() - slice2.len()
+ }
+ fn propose(&mut self, _prefix: &Tuple, values: &mut Vec<&'leap Val>) {
+ let slice = &self.relation[self.start..self.end];
+ values.extend(slice.iter().map(|&(_, ref val)| val));
+ }
+ fn intersect(&mut self, _prefix: &Tuple, values: &mut Vec<&'leap Val>) {
+ let mut slice = &self.relation[self.start..self.end];
+ values.retain(|v| {
+ slice = gallop(slice, |kv| &kv.1 < v);
+ slice.get(0).map(|kv| &kv.1) == Some(v)
+ });
+ }
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> Leapers<'leap, Tuple, Val>
+ for ExtendWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ fn for_each_count(&mut self, tuple: &Tuple, mut op: impl FnMut(usize, usize)) {
+ op(0, self.count(tuple))
+ }
+
+ fn propose(&mut self, tuple: &Tuple, min_index: usize, values: &mut Vec<&'leap Val>) {
+ assert_eq!(min_index, 0);
+ Leaper::propose(self, tuple, values);
+ }
+
+ fn intersect(&mut self, _: &Tuple, min_index: usize, _: &mut Vec<&'leap Val>) {
+ assert_eq!(min_index, 0);
+ }
+ }
+}
+
+pub(crate) mod extend_anti {
+ use super::{binary_search, Leaper, Relation};
+ use crate::join::gallop;
+
+ /// Wraps a Relation<Tuple> as a leaper.
+ pub struct ExtendAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ relation: &'leap Relation<(Key, Val)>,
+ key_func: Func,
+ phantom: ::std::marker::PhantomData<Tuple>,
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> ExtendAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ /// Constructs an ExtendAnti from a relation and a key function.
+ pub fn from(relation: &'leap Relation<(Key, Val)>, key_func: Func) -> Self {
+ ExtendAnti {
+ relation,
+ key_func,
+ phantom: ::std::marker::PhantomData,
+ }
+ }
+ }
+
+ impl<'leap, Key: Ord, Val: Ord + 'leap, Tuple: Ord, Func> Leaper<'leap, Tuple, Val>
+ for ExtendAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> Key,
+ {
+ fn count(&mut self, _prefix: &Tuple) -> usize {
+ usize::max_value()
+ }
+ fn propose(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val>) {
+ panic!("ExtendAnti::propose(): variable apparently unbound.");
+ }
+ fn intersect(&mut self, prefix: &Tuple, values: &mut Vec<&'leap Val>) {
+ let key = (self.key_func)(prefix);
+ let start = binary_search(&self.relation[..], |x| &x.0 < &key);
+ let slice1 = &self.relation[start..];
+ let slice2 = gallop(slice1, |x| &x.0 <= &key);
+ let mut slice = &slice1[..(slice1.len() - slice2.len())];
+ if !slice.is_empty() {
+ values.retain(|v| {
+ slice = gallop(slice, |kv| &kv.1 < v);
+ slice.get(0).map(|kv| &kv.1) != Some(v)
+ });
+ }
+ }
+ }
+}
+
+pub(crate) mod filter_with {
+
+ use super::{Leaper, Leapers, Relation};
+
+ /// Wraps a Relation<Tuple> as a leaper.
+ pub struct FilterWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ relation: &'leap Relation<(Key, Val)>,
+ key_func: Func,
+ phantom: ::std::marker::PhantomData<Tuple>,
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> FilterWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ /// Constructs a FilterWith from a relation and a function mapping tuples to key-value pairs.
+ pub fn from(relation: &'leap Relation<(Key, Val)>, key_func: Func) -> Self {
+ FilterWith {
+ relation,
+ key_func,
+ phantom: ::std::marker::PhantomData,
+ }
+ }
+ }
+
+ impl<'leap, Key, Val, Val2, Tuple, Func> Leaper<'leap, Tuple, Val2>
+ for FilterWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ fn count(&mut self, prefix: &Tuple) -> usize {
+ let key_val = (self.key_func)(prefix);
+ if self.relation.binary_search(&key_val).is_ok() {
+ usize::max_value()
+ } else {
+ 0
+ }
+ }
+ fn propose(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val2>) {
+ panic!("FilterWith::propose(): variable apparently unbound.");
+ }
+ fn intersect(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val2>) {
+ // Only here because we didn't return zero above, right?
+ }
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> Leapers<'leap, Tuple, ()>
+ for FilterWith<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ fn for_each_count(&mut self, tuple: &Tuple, mut op: impl FnMut(usize, usize)) {
+ if <Self as Leaper<Tuple, ()>>::count(self, tuple) == 0 {
+ op(0, 0)
+ } else {
+ op(0, 1)
+ }
+ }
+
+ fn propose(&mut self, _: &Tuple, min_index: usize, values: &mut Vec<&'leap ()>) {
+ assert_eq!(min_index, 0);
+ values.push(&());
+ }
+
+ fn intersect(&mut self, _: &Tuple, min_index: usize, values: &mut Vec<&'leap ()>) {
+ assert_eq!(min_index, 0);
+ assert_eq!(values.len(), 1);
+ }
+ }
+}
+
+pub(crate) mod filter_anti {
+
+ use super::{Leaper, Leapers, Relation};
+
+ /// Wraps a Relation<Tuple> as a leaper.
+ pub struct FilterAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ relation: &'leap Relation<(Key, Val)>,
+ key_func: Func,
+ phantom: ::std::marker::PhantomData<Tuple>,
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> FilterAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ /// Constructs a FilterAnti from a relation and a function mapping tuples to key-value pairs.
+ pub fn from(relation: &'leap Relation<(Key, Val)>, key_func: Func) -> Self {
+ FilterAnti {
+ relation,
+ key_func,
+ phantom: ::std::marker::PhantomData,
+ }
+ }
+ }
+
+ impl<'leap, Key: Ord, Val: Ord + 'leap, Val2, Tuple: Ord, Func> Leaper<'leap, Tuple, Val2>
+ for FilterAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ fn count(&mut self, prefix: &Tuple) -> usize {
+ let key_val = (self.key_func)(prefix);
+ if self.relation.binary_search(&key_val).is_ok() {
+ 0
+ } else {
+ usize::max_value()
+ }
+ }
+ fn propose(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val2>) {
+ panic!("FilterAnti::propose(): variable apparently unbound.");
+ }
+ fn intersect(&mut self, _prefix: &Tuple, _values: &mut Vec<&'leap Val2>) {
+ // Only here because we didn't return zero above, right?
+ }
+ }
+
+ impl<'leap, Key, Val, Tuple, Func> Leapers<'leap, Tuple, ()>
+ for FilterAnti<'leap, Key, Val, Tuple, Func>
+ where
+ Key: Ord + 'leap,
+ Val: Ord + 'leap,
+ Tuple: Ord,
+ Func: Fn(&Tuple) -> (Key, Val),
+ {
+ fn for_each_count(&mut self, tuple: &Tuple, mut op: impl FnMut(usize, usize)) {
+ if <Self as Leaper<Tuple, ()>>::count(self, tuple) == 0 {
+ op(0, 0)
+ } else {
+ op(0, 1)
+ }
+ }
+
+ fn propose(&mut self, _: &Tuple, min_index: usize, values: &mut Vec<&'leap ()>) {
+ // We only get here if `tuple` is *not* a member of `self.relation`
+ assert_eq!(min_index, 0);
+ values.push(&());
+ }
+
+ fn intersect(&mut self, _: &Tuple, min_index: usize, values: &mut Vec<&'leap ()>) {
+ // We only get here if `tuple` is not a member of `self.relation`
+ assert_eq!(min_index, 0);
+ assert_eq!(values.len(), 1);
+ }
+ }
+}
+
+fn binary_search<T>(slice: &[T], mut cmp: impl FnMut(&T) -> bool) -> usize {
+ // we maintain the invariant that `lo` many elements of `slice` satisfy `cmp`.
+ // `hi` is maintained at the first element we know does not satisfy `cmp`.
+
+ let mut hi = slice.len();
+ let mut lo = 0;
+ while lo < hi {
+ let mid = lo + (hi - lo) / 2;
+ if cmp(&slice[mid]) {
+ lo = mid + 1;
+ } else {
+ hi = mid;
+ }
+ }
+ lo
+}
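Putting the leapers together: `from_leapjoin` combines one dynamic source variable with a single leaper or a tuple of leapers built over fixed relations; `extend_with`/`extend_anti` propose or remove values, while `filter_with`/`filter_anti` and the Prefix/Value filters only prune. A sketch combining an extension with a `ValueFilter` (graph and filter invented for illustration):

    use datafrog::{Iteration, Relation, RelationLeaper, ValueFilter};

    let edges: Relation<(u32, u32)> = vec![(0, 1), (1, 2), (1, 3), (3, 4)].into();

    let mut iteration = Iteration::new();
    let reachable = iteration.variable::<u32>("reachable");
    reachable.extend(vec![0u32]);

    while iteration.changed() {
        // reachable(Y) :- reachable(X), edges(X, Y), Y != 3.
        reachable.from_leapjoin(
            &reachable,
            (
                edges.extend_with(|&x| x),                      // proposes successors of X
                ValueFilter::from(|_x: &u32, &y: &u32| y != 3), // prunes proposed Y values
            ),
            |&_x, &y| y,
        );
    }

    // Node 4 stays unreachable because the step through node 3 is filtered out.
    assert_eq!(reachable.complete().elements, vec![0, 1, 2]);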
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/.cargo-checksum.json b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/.cargo-checksum.json
new file mode 100644
index 0000000..42ea021
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CHANGELOG.md":"df7d7ea4256611dd5e3bf160e39bb3f8b665c6805ae47fdbf28acf9f77245ffd","Cargo.toml":"2161251dd0dfbea680a9d5fd762973e343fc5215794681c5ffd641faab9a4e4c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"a23bbe55ac94081711c081a63df10d324a8a26f4b836952cb3c45c9318a03152","benches/value.rs":"b613ff353d3cf0ef8cb98e4ca461ea929b8ba553fe299f2eb2942d77a5b1b6a0","src/__private_api.rs":"da677f1e29e3cb135c971247031bc0eb20324294ab5c1c74c5118f87e45518ae","src/kv/error.rs":"6dae12424164c33b93915f5e70bd6d99d616c969c8bfb543806721dd9b423981","src/kv/key.rs":"9439e91c3ab3f9574a6a11a0347c7b63fdf1652384a6b28411136e4373de2970","src/kv/mod.rs":"3521a5bcfd7f92dcfac6c3c948020d686fee696596c566333a27edbbcc8a4ea8","src/kv/source.rs":"73fbc180c824072d86f1f41f8c59c014db1d8988a86be38a9128d67d6aab06a5","src/kv/value.rs":"0aade52b8e3523a17d6114f8b664793862032a94ea1ee2a4f12a20dd729b92d4","src/lib.rs":"55c32130cd8b99cde2ea962a403cdade52d20e80088357ba2784ee53b2eb9a2c","src/macros.rs":"dfb98017d5f205fec632069ab857a18661d6d563cf5162eeef64d367cc3ad7f5","src/serde.rs":"35f520f62fdba0216ccee33e5b66ad8f81dee3af5b65b824f1816180c9350df5","triagebot.toml":"a135e10c777cd13459559bdf74fb704c1379af7c9b0f70bc49fa6f5a837daa81"},"package":"a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"} \ No newline at end of file
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/CHANGELOG.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/CHANGELOG.md
new file mode 100644
index 0000000..2c89834
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/CHANGELOG.md
@@ -0,0 +1,324 @@
+# Change Log
+
+## [Unreleased]
+
+## [0.4.22] - 2024-06-27
+
+## What's Changed
+* Add some clarifications to the library docs by @KodrAus in https://github.com/rust-lang/log/pull/620
+* Add links to `colog` crate by @chrivers in https://github.com/rust-lang/log/pull/621
+* adding line_number test + updating some testing infrastructure by @DIvkov575 in https://github.com/rust-lang/log/pull/619
+* Clarify the actual set of functions that can race in _racy variants by @KodrAus in https://github.com/rust-lang/log/pull/623
+* Replace deprecated std::sync::atomic::spin_loop_hint() by @Catamantaloedis in https://github.com/rust-lang/log/pull/625
+* Check usage of max_level features by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/627
+* Remove unneeded import by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/628
+* Loosen orderings for logger initialization in https://github.com/rust-lang/log/pull/632. Originally by @pwoolcoc in https://github.com/rust-lang/log/pull/599
+* Use Location::caller() for file and line info in https://github.com/rust-lang/log/pull/633. Originally by @Cassy343 in https://github.com/rust-lang/log/pull/520
+
+## New Contributors
+* @chrivers made their first contribution in https://github.com/rust-lang/log/pull/621
+* @DIvkov575 made their first contribution in https://github.com/rust-lang/log/pull/619
+* @Catamantaloedis made their first contribution in https://github.com/rust-lang/log/pull/625
+
+**Full Changelog**: https://github.com/rust-lang/log/compare/0.4.21...0.4.22
+
+## [0.4.21] - 2024-02-27
+
+### What's Changed
+* Minor clippy nits by @nyurik in https://github.com/rust-lang/log/pull/578
+* Simplify Display impl by @nyurik in https://github.com/rust-lang/log/pull/579
+* Set all crates to 2021 edition by @nyurik in https://github.com/rust-lang/log/pull/580
+* Various changes based on review by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/583
+* Fix typo in file_static() method doc by @dimo414 in https://github.com/rust-lang/log/pull/590
+* Specialize empty key value pairs by @EFanZh in https://github.com/rust-lang/log/pull/576
+* Fix incorrect lifetime in Value::to_str() by @peterjoel in https://github.com/rust-lang/log/pull/587
+* Remove some API of the key-value feature by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/585
+* Add logcontrol-log and log-reload by @swsnr in https://github.com/rust-lang/log/pull/595
+* Add Serialization section to kv::Value docs by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/593
+* Rename Value::to_str to to_cow_str by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/592
+* Clarify documentation and simplify initialization of `STATIC_MAX_LEVEL` by @ptosi in https://github.com/rust-lang/log/pull/594
+* Update docs to 2021 edition, test by @nyurik in https://github.com/rust-lang/log/pull/577
+* Add "alterable_logger" link to README.md by @brummer-simon in https://github.com/rust-lang/log/pull/589
+* Normalize line ending by @EFanZh in https://github.com/rust-lang/log/pull/602
+* Remove `ok_or` in favor of `Option::ok_or` by @AngelicosPhosphoros in https://github.com/rust-lang/log/pull/607
+* Use `Acquire` ordering for initialization check by @AngelicosPhosphoros in https://github.com/rust-lang/log/pull/610
+* Get structured logging API ready for stabilization by @KodrAus in https://github.com/rust-lang/log/pull/613
+
+### New Contributors
+* @nyurik made their first contribution in https://github.com/rust-lang/log/pull/578
+* @dimo414 made their first contribution in https://github.com/rust-lang/log/pull/590
+* @peterjoel made their first contribution in https://github.com/rust-lang/log/pull/587
+* @ptosi made their first contribution in https://github.com/rust-lang/log/pull/594
+* @brummer-simon made their first contribution in https://github.com/rust-lang/log/pull/589
+* @AngelicosPhosphoros made their first contribution in https://github.com/rust-lang/log/pull/607
+
+## [0.4.20] - 2023-07-11
+
+* Remove rustversion dev-dependency by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/568
+* Remove `local_inner_macros` usage by @EFanZh in https://github.com/rust-lang/log/pull/570
+
+## [0.4.19] - 2023-06-10
+
+* Use target_has_atomic instead of the old atomic_cas cfg by @GuillaumeGomez in https://github.com/rust-lang/log/pull/555
+* Put MSRV into Cargo.toml by @est31 in https://github.com/rust-lang/log/pull/557
+
+## [0.4.18] - 2023-05-28
+
+* fix markdown links (again) by @hellow554 in https://github.com/rust-lang/log/pull/513
+* add cargo doc to workflow by @hellow554 in https://github.com/rust-lang/log/pull/515
+* Apply Clippy lints by @hellow554 in https://github.com/rust-lang/log/pull/516
+* Replace ad-hoc eq_ignore_ascii_case with slice::eq_ignore_ascii_case by @glandium in https://github.com/rust-lang/log/pull/519
+* fix up windows targets by @KodrAus in https://github.com/rust-lang/log/pull/528
+* typo fix by @jiangying000 in https://github.com/rust-lang/log/pull/529
+* Remove dependency on cfg_if by @EriKWDev in https://github.com/rust-lang/log/pull/536
+* GitHub Workflows security hardening by @sashashura in https://github.com/rust-lang/log/pull/538
+* Fix build status badge by @atouchet in https://github.com/rust-lang/log/pull/539
+* Add call_logger to the documentation by @a1ecbr0wn in https://github.com/rust-lang/log/pull/547
+* Use stable internals for key-value API by @KodrAus in https://github.com/rust-lang/log/pull/550
+* Change wording of list of implementations by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/553
+* Add std-logger to list of implementations by @Thomasdezeeuw in https://github.com/rust-lang/log/pull/554
+* Add `set_max_level_racy` and gate `set_max_level` by @djkoloski in https://github.com/rust-lang/log/pull/544
+* [doc] src/lib.rs : prefix an unused variable with an underscore by @OccupyMars2025 in https://github.com/rust-lang/log/pull/561
+* [doc] src/macros.rs : correct grammar errors of an example in lib documentation by @OccupyMars2025 in https://github.com/rust-lang/log/pull/562
+
+## [0.4.17] - 2022-04-29
+
+* Update `kv_unstable` internal dependencies.
+
+## [0.4.16] - 2022-03-22
+
+* Fix a conflict with unqualified `Option` use in macros.
+
+## [0.4.15] - 2022-02-23
+
+* Silence a warning about the deprecated `spin_loop_hint`.
+* Relax ordering in the atomic `set_max_level` call.
+* Add thumbv4t-none-eabi to targets that don't support atomics.
+* Allow levels to be iterated over.
+* Implement `Log` on some common wrapper types.
+* Improvements to test coverage.
+* Improvements to documentation.
+* Add key-value support to the `log!` macros.
+* Tighten `kv_unstable` internal dependencies so they don't bump past their current alpha.
+* Add a simple visit API to `kv_unstable`.
+* Support `NonZero*` integers as values in structured logging.
+* Support static strings as keys in structured logging.
+
+## [0.4.14] - 2021-01-27
+
+* Remove the `__private_api_log_lit` special case.
+* Fixed incorrect combination of `kv_unstable` and `std` features causing compile failures.
+* Remove unstable `Value::to_*` conversions that were incorrectly using `as`.
+* Rename unstable `Value::to_error` to `Value::to_borrowed_error`.
+
+## [0.4.13] - 2021-01-11
+
+* This is the same as `0.4.11`, except with a `kv_unstable_std` feature added to aid migrating current dependents to `0.4.14` (which was originally going to be `0.4.13` until it was decided to create a patch from `0.4.11` to minimize disruption).
+
+## [0.4.12] - 2020-12-24
+
+### New
+
+* Support platforms without atomics by racing instead of failing to compile
+* Implement `Log` for `Box<T: Log>`
+* Update `cfg-if` to `1.0`
+* Internal reworks of the structured logging API. Removed the `Fill` API
+and added `source::as_map` and `source::as_list` to easily serialize a `Source`
+as either a map of `{key: value, ..}` or as a list of `[(key, value), ..]`.
+
+### Fixed
+
+* Fixed deserialization of `LevelFilter` to use its variants' `u64` indices
+
+## [0.4.11] - 2020-07-09
+
+### New
+
+* Support coercing structured values into concrete types.
+* Reference the `win_dbg_logger` in the readme.
+
+### Fixed
+
+* Updates a few deprecated items used internally.
+* Fixed issues in docs and expands sections.
+* Show the correct build badge in the readme.
+* Fix up a possible inference breakage with structured value errors.
+* Respect formatting flags in structured value formatting.
+
+## [0.4.10] - 2019-12-16 (yanked)
+
+### Fixed
+
+* Fixed the `log!` macros so they work in expression context (this regressed in `0.4.9`, which has been yanked).
+
+## [0.4.9] - 2019-12-12 (yanked)
+
+### Minimum Supported Rust Version
+
+This release bumps the minimum compiler version to `1.31.0`. This was mainly needed for `cfg-if`,
+but between `1.16.0` and `1.31.0` there are a lot of language and library improvements we now
+take advantage of.
+
+### New
+
+* Unstable support for capturing key-value pairs in a record using the `log!` macros
+
+### Improved
+
+* Better documentation for max level filters.
+* Internal updates to line up with the bumped MSRV.
+
+## [0.4.8] - 2019-07-28
+
+### New
+
+* Support attempting to get `Record` fields as static strings.
+
+## [0.4.7] - 2019-07-06
+
+### New
+
+* Support for embedded environments with thread-unsafe initialization.
+* Initial unstable support for capturing structured data under the `kv_unstable`
+feature gate. This new API doesn't affect existing users and may change in future
+patches (so those changes may not appear in the changelog until it stabilizes).
+
+### Improved
+
+* Docs for using `log` with the 2018 edition.
+* Error messages for macros missing arguments.
+
+## [0.4.6] - 2018-10-27
+
+### Improved
+
+* Support 2018-style macro import for the `log_enabled!` macro.
+
+## [0.4.5] - 2018-09-03
+
+### Improved
+
+* Make `log`'s internal helper macros less likely to conflict with user-defined
+ macros.
+
+## [0.4.4] - 2018-08-17
+
+### Improved
+
+* Support 2018-style imports of the log macros.
+
+## [0.4.3] - 2018-06-29
+
+### Improved
+
+* More code generation improvements.
+
+## [0.4.2] - 2018-06-05
+
+### Improved
+
+* Log invocations now generate less code.
+
+### Fixed
+
+* Example Logger implementations now properly set the max log level.
+
+## [0.4.1] - 2017-12-30
+
+### Fixed
+
+* Some doc links were fixed.
+
+## [0.4.0] - 2017-12-24
+
+The changes in this release include cleanup of some obscure functionality and a more robust public
+API, designed to support bridges to other logging systems and to provide more flexibility for new
+features in the future.
+
+### Compatibility
+
+Vast portions of the Rust ecosystem use the 0.3.x release series of log, and we don't want to force
+the community to go through the pain of upgrading every crate to 0.4.x at the exact same time. Along
+with 0.4.0, we've published a new 0.3.9 release which acts as a "shim" over 0.4.0. This will allow
+crates using either version to coexist without losing messages from one side or the other.
+
+There is one caveat - a log message generated by a crate using 0.4.x but consumed by a logging
+implementation using 0.3.x will not have a file name or module path. Applications affected by this
+can upgrade their logging implementations to one using 0.4.x to avoid losing this information. The
+other direction does not lose any information, fortunately!
+
+**TL;DR** Libraries should feel comfortable upgrading to 0.4.0 without treating that as a breaking
+change. Applications may need to update their logging implementation (e.g. env-logger) to a newer
+version using log 0.4.x to avoid losing module and file information.
+
+### New
+
+* The crate is now `no_std` by default.
+* `Level` and `LevelFilter` now implement `Serialize` and `Deserialize` when the `serde` feature is
+ enabled.
+* The `Record` and `Metadata` types can now be constructed by third-party code via a builder API.
+* The `logger` free function returns a reference to the logger implementation. This, along with the
+ ability to construct `Record`s, makes it possible to bridge from another logging framework to
+  this one without digging into the private internals of the crate. The standard `error!`, `warn!`,
+  etc., macros now exclusively use the public API of the crate rather than "secret" internal APIs.
+* `Log::flush` has been added to allow crates to tell the logging implementation to ensure that all
+ "in flight" log events have been persisted. This can be used, for example, just before an
+ application exits to ensure that asynchronous log sinks finish their work.
+
+### Removed
+
+* The `shutdown` and `shutdown_raw` functions have been removed. Supporting shutdown significantly
+ complicated the implementation and imposed a performance cost on each logging operation.
+* The `log_panics` function and its associated `nightly` Cargo feature have been removed. Use the
+  [log-panics](https://crates.io/crates/log-panics) crate instead.
+
+### Changed
+
+* The `Log` prefix has been removed from type names. For example, `LogLevelFilter` is now
+ `LevelFilter`, and `LogRecord` is now `Record`.
+* The `MaxLogLevelFilter` object has been removed in favor of a `set_max_level` free function.
+* The `set_logger` free functions have been restructured. The logger is now directly passed to the
+ functions rather than a closure which returns the logger. `set_logger` now takes a `&'static
+ Log` and is usable in `no_std` contexts in place of the old `set_logger_raw`. `set_boxed_logger`
+ is a convenience function which takes a `Box<Log>` but otherwise acts like `set_logger`. It
+ requires the `std` feature.
+* The `file` and `module_path` values in `Record` no longer have the `'static` lifetime to support
+ integration with other logging frameworks that don't provide a `'static` lifetime for the
+ equivalent values.
+* The `file`, `line`, and `module_path` values in `Record` are now `Option`s to support integration
+ with other logging frameworks that don't provide those values.
+
+### In the Future
+
+* We're looking to add support for *structured* logging - the inclusion of extra key-value pairs of
+  information in a log event in addition to the normal string message. It should be possible to add
+  this to the 0.4.x series in a backwards-compatible manner once the design is worked out.
+
+## Older
+
+Look at the [release tags] for information about older releases.
+
+[Unreleased]: https://github.com/rust-lang-nursery/log/compare/0.4.22...HEAD
+[0.4.22]: https://github.com/rust-lang/log/compare/0.4.21...0.4.22
+[0.4.21]: https://github.com/rust-lang/log/compare/0.4.20...0.4.21
+[0.4.20]: https://github.com/rust-lang-nursery/log/compare/0.4.19...0.4.20
+[0.4.19]: https://github.com/rust-lang-nursery/log/compare/0.4.18...0.4.19
+[0.4.18]: https://github.com/rust-lang-nursery/log/compare/0.4.17...0.4.18
+[0.4.17]: https://github.com/rust-lang-nursery/log/compare/0.4.16...0.4.17
+[0.4.16]: https://github.com/rust-lang-nursery/log/compare/0.4.15...0.4.16
+[0.4.15]: https://github.com/rust-lang-nursery/log/compare/0.4.13...0.4.15
+[0.4.14]: https://github.com/rust-lang-nursery/log/compare/0.4.13...0.4.14
+[0.4.13]: https://github.com/rust-lang-nursery/log/compare/0.4.11...0.4.13
+[0.4.12]: https://github.com/rust-lang-nursery/log/compare/0.4.11...0.4.12
+[0.4.11]: https://github.com/rust-lang-nursery/log/compare/0.4.10...0.4.11
+[0.4.10]: https://github.com/rust-lang-nursery/log/compare/0.4.9...0.4.10
+[0.4.9]: https://github.com/rust-lang-nursery/log/compare/0.4.8...0.4.9
+[0.4.8]: https://github.com/rust-lang-nursery/log/compare/0.4.7...0.4.8
+[0.4.7]: https://github.com/rust-lang-nursery/log/compare/0.4.6...0.4.7
+[0.4.6]: https://github.com/rust-lang-nursery/log/compare/0.4.5...0.4.6
+[0.4.5]: https://github.com/rust-lang-nursery/log/compare/0.4.4...0.4.5
+[0.4.4]: https://github.com/rust-lang-nursery/log/compare/0.4.3...0.4.4
+[0.4.3]: https://github.com/rust-lang-nursery/log/compare/0.4.2...0.4.3
+[0.4.2]: https://github.com/rust-lang-nursery/log/compare/0.4.1...0.4.2
+[0.4.1]: https://github.com/rust-lang-nursery/log/compare/0.4.0...0.4.1
+[0.4.0]: https://github.com/rust-lang-nursery/log/compare/0.3.8...0.4.0
+[release tags]: https://github.com/rust-lang-nursery/log/releases
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/Cargo.toml b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/Cargo.toml
new file mode 100644
index 0000000..313a005
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/Cargo.toml
@@ -0,0 +1,139 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+rust-version = "1.60.0"
+name = "log"
+version = "0.4.22"
+authors = ["The Rust Project Developers"]
+exclude = ["rfcs/**/*"]
+description = """
+A lightweight logging facade for Rust
+"""
+documentation = "https://docs.rs/log"
+readme = "README.md"
+keywords = ["logging"]
+categories = ["development-tools::debugging"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/log"
+
+[package.metadata.docs.rs]
+features = [
+ "std",
+ "serde",
+ "kv_std",
+ "kv_sval",
+ "kv_serde",
+]
+
+[[test]]
+name = "integration"
+path = "tests/integration.rs"
+harness = false
+
+[[test]]
+name = "macros"
+path = "tests/macros.rs"
+harness = true
+
+[dependencies.serde]
+version = "1.0"
+optional = true
+default-features = false
+
+[dependencies.sval]
+version = "2.1"
+optional = true
+default-features = false
+
+[dependencies.sval_ref]
+version = "2.1"
+optional = true
+default-features = false
+
+[dependencies.value-bag]
+version = "1.7"
+features = ["inline-i128"]
+optional = true
+default-features = false
+
+[dev-dependencies.proc-macro2]
+version = "1.0.63"
+default-features = false
+
+[dev-dependencies.serde]
+version = "1.0"
+features = ["derive"]
+
+[dev-dependencies.serde_json]
+version = "1.0"
+
+[dev-dependencies.serde_test]
+version = "1.0"
+
+[dev-dependencies.sval]
+version = "2.1"
+
+[dev-dependencies.sval_derive]
+version = "2.1"
+
+[dev-dependencies.value-bag]
+version = "1.7"
+features = ["test"]
+
+[features]
+kv = []
+kv_serde = [
+ "kv_std",
+ "value-bag/serde",
+ "serde",
+]
+kv_std = [
+ "std",
+ "kv",
+ "value-bag/error",
+]
+kv_sval = [
+ "kv",
+ "value-bag/sval",
+ "sval",
+ "sval_ref",
+]
+kv_unstable = [
+ "kv",
+ "value-bag",
+]
+kv_unstable_serde = [
+ "kv_serde",
+ "kv_unstable_std",
+]
+kv_unstable_std = [
+ "kv_std",
+ "kv_unstable",
+]
+kv_unstable_sval = [
+ "kv_sval",
+ "kv_unstable",
+]
+max_level_debug = []
+max_level_error = []
+max_level_info = []
+max_level_off = []
+max_level_trace = []
+max_level_warn = []
+release_max_level_debug = []
+release_max_level_error = []
+release_max_level_info = []
+release_max_level_off = []
+release_max_level_trace = []
+release_max_level_warn = []
+std = []
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-APACHE b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-APACHE
new file mode 100644
index 0000000..16fe87b
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-MIT b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-MIT
new file mode 100644
index 0000000..39d4bdb
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2014 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/README.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/README.md
new file mode 100644
index 0000000..d4a08b1
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/README.md
@@ -0,0 +1,130 @@
+log
+===
+
+A Rust library providing a lightweight logging *facade*.
+
+[![Build status](https://img.shields.io/github/actions/workflow/status/rust-lang/log/main.yml?branch=master)](https://github.com/rust-lang/log/actions)
+[![Latest version](https://img.shields.io/crates/v/log.svg)](https://crates.io/crates/log)
+[![Documentation](https://docs.rs/log/badge.svg)](https://docs.rs/log)
+![License](https://img.shields.io/crates/l/log.svg)
+
+* [`log` documentation](https://docs.rs/log)
+
+A logging facade provides a single logging API that abstracts over the actual
+logging implementation. Libraries can use the logging API provided by this
+crate, and the consumer of those libraries can choose the logging
+implementation that is most suitable for its use case.
+
+
+## Minimum supported `rustc`
+
+`1.60.0+`
+
+This version is explicitly tested in CI and may be bumped in any release as needed. Maintaining compatibility with older compilers is a priority though, so the bar for bumping the minimum supported version is set very high. Any changes to the supported minimum version will be called out in the release notes.
+
+## Usage
+
+### In libraries
+
+Libraries should link only to the `log` crate, and use the provided macros to
+log whatever information will be useful to downstream consumers:
+
+```toml
+[dependencies]
+log = "0.4"
+```
+
+```rust
+use log::{info, trace, warn};
+
+pub fn shave_the_yak(yak: &mut Yak) {
+ trace!("Commencing yak shaving");
+
+ loop {
+ match find_a_razor() {
+ Ok(razor) => {
+ info!("Razor located: {razor}");
+ yak.shave(razor);
+ break;
+ }
+ Err(err) => {
+ warn!("Unable to locate a razor: {err}, retrying");
+ }
+ }
+ }
+}
+```
+
+### In executables
+
+In order to produce log output, executables have to use a logger implementation compatible with the facade.
+There are many available implementations to choose from; here are some options:
+
+* Simple minimal loggers:
+ * [`env_logger`](https://docs.rs/env_logger/*/env_logger/)
+ * [`colog`](https://docs.rs/colog/*/colog/)
+ * [`simple_logger`](https://docs.rs/simple_logger/*/simple_logger/)
+ * [`simplelog`](https://docs.rs/simplelog/*/simplelog/)
+ * [`pretty_env_logger`](https://docs.rs/pretty_env_logger/*/pretty_env_logger/)
+ * [`stderrlog`](https://docs.rs/stderrlog/*/stderrlog/)
+ * [`flexi_logger`](https://docs.rs/flexi_logger/*/flexi_logger/)
+ * [`call_logger`](https://docs.rs/call_logger/*/call_logger/)
+ * [`std-logger`](https://docs.rs/std-logger/*/std_logger/)
+ * [`structured-logger`](https://docs.rs/structured-logger/latest/structured_logger/)
+* Complex configurable frameworks:
+ * [`log4rs`](https://docs.rs/log4rs/*/log4rs/)
+ * [`fern`](https://docs.rs/fern/*/fern/)
+* Adaptors for other facilities:
+ * [`syslog`](https://docs.rs/syslog/*/syslog/)
+ * [`systemd-journal-logger`](https://docs.rs/systemd-journal-logger/*/systemd_journal_logger/)
+ * [`slog-stdlog`](https://docs.rs/slog-stdlog/*/slog_stdlog/)
+ * [`android_log`](https://docs.rs/android_log/*/android_log/)
+ * [`win_dbg_logger`](https://docs.rs/win_dbg_logger/*/win_dbg_logger/)
+ * [`db_logger`](https://docs.rs/db_logger/*/db_logger/)
+ * [`log-to-defmt`](https://docs.rs/log-to-defmt/*/log_to_defmt/)
+ * [`logcontrol-log`](https://docs.rs/logcontrol-log/*/logcontrol_log/)
+* For WebAssembly binaries:
+ * [`console_log`](https://docs.rs/console_log/*/console_log/)
+* For dynamic libraries:
+ * You may need to construct [an FFI-safe wrapper over `log`](https://github.com/rust-lang/log/issues/421) to initialize in your libraries.
+* Utilities:
+ * [`log_err`](https://docs.rs/log_err/*/log_err/)
+ * [`log-reload`](https://docs.rs/log-reload/*/log_reload/)
+ * [`alterable_logger`](https://docs.rs/alterable_logger/*/alterable_logger)
+
+Executables should choose a logger implementation and initialize it early in the
+runtime of the program. Logger implementations will typically include a
+function to do this. Any log messages generated before the logger is
+initialized will be ignored.
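+
+As a minimal sketch, a logger that writes everything at the `Info` level or
+above to standard error can be implemented and installed like this (real
+implementations such as those listed above add configuration and formatting on
+top of the same pattern):
+
+```rust
+use log::{Level, LevelFilter, Metadata, Record, SetLoggerError};
+
+struct SimpleLogger;
+
+impl log::Log for SimpleLogger {
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        // Only `Error`, `Warn` and `Info` records are considered enabled.
+        metadata.level() <= Level::Info
+    }
+
+    fn log(&self, record: &Record) {
+        if self.enabled(record.metadata()) {
+            eprintln!("{} - {}", record.level(), record.args());
+        }
+    }
+
+    fn flush(&self) {}
+}
+
+static LOGGER: SimpleLogger = SimpleLogger;
+
+pub fn init() -> Result<(), SetLoggerError> {
+    // Install the logger, then raise the global maximum level filter so the
+    // macros actually forward records to it.
+    log::set_logger(&LOGGER).map(|()| log::set_max_level(LevelFilter::Info))
+}
+```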
+
+The executable itself may use the `log` crate to log as well.
+
+## Structured logging
+
+If you enable the `kv` feature, you can associate structured data with your log records:
+
+```rust
+use log::{info, trace, warn};
+
+pub fn shave_the_yak(yak: &mut Yak) {
+ // `yak:serde` will capture `yak` using its `serde::Serialize` impl
+ //
+ // You could also use `:?` for `Debug`, or `:%` for `Display`. For a
+ // full list, see the `log` crate documentation
+ trace!(target = "yak_events", yak:serde; "Commencing yak shaving");
+
+ loop {
+ match find_a_razor() {
+ Ok(razor) => {
+ info!(razor; "Razor located");
+ yak.shave(razor);
+ break;
+ }
+ Err(e) => {
+ // `e:err` will capture `e` using its `std::error::Error` impl
+ warn!(e:err; "Unable to locate a razor, retrying");
+ }
+ }
+ }
+}
+```
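+
+The `kv` feature (and `kv_serde`, which the `yak:serde` capture above relies
+on) is an ordinary Cargo feature; a minimal sketch of the dependency entry in
+your own manifest:
+
+```toml
+[dependencies]
+log = { version = "0.4", features = ["kv", "kv_serde"] }
+```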
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/benches/value.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/benches/value.rs
new file mode 100644
index 0000000..3d0f18b
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/benches/value.rs
@@ -0,0 +1,27 @@
+#![cfg(feature = "kv")]
+#![feature(test)]
+
+use log::kv::Value;
+
+#[bench]
+fn u8_to_value(b: &mut test::Bencher) {
+ b.iter(|| Value::from(1u8));
+}
+
+#[bench]
+fn u8_to_value_debug(b: &mut test::Bencher) {
+ b.iter(|| Value::from_debug(&1u8));
+}
+
+#[bench]
+fn str_to_value_debug(b: &mut test::Bencher) {
+ b.iter(|| Value::from_debug(&"a string"));
+}
+
+#[bench]
+fn custom_to_value_debug(b: &mut test::Bencher) {
+ #[derive(Debug)]
+ struct A;
+
+ b.iter(|| Value::from_debug(&A));
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/__private_api.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/__private_api.rs
new file mode 100644
index 0000000..11bc2fc
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/__private_api.rs
@@ -0,0 +1,123 @@
+//! WARNING: this is not part of the crate's public API and is subject to change at any time
+
+use self::sealed::KVs;
+use crate::{Level, Metadata, Record};
+use std::fmt::Arguments;
+use std::panic::Location;
+pub use std::{format_args, module_path, stringify};
+
+#[cfg(not(feature = "kv"))]
+pub type Value<'a> = &'a str;
+
+mod sealed {
+ /// Types for the `kv` argument.
+ pub trait KVs<'a> {
+ fn into_kvs(self) -> Option<&'a [(&'a str, super::Value<'a>)]>;
+ }
+}
+
+// Types for the `kv` argument.
+
+impl<'a> KVs<'a> for &'a [(&'a str, Value<'a>)] {
+ #[inline]
+ fn into_kvs(self) -> Option<&'a [(&'a str, Value<'a>)]> {
+ Some(self)
+ }
+}
+
+impl<'a> KVs<'a> for () {
+ #[inline]
+ fn into_kvs(self) -> Option<&'a [(&'a str, Value<'a>)]> {
+ None
+ }
+}
+
+// Log implementation.
+
+fn log_impl(
+ args: Arguments,
+ level: Level,
+ &(target, module_path, loc): &(&str, &'static str, &'static Location),
+ kvs: Option<&[(&str, Value)]>,
+) {
+ #[cfg(not(feature = "kv"))]
+ if kvs.is_some() {
+ panic!("key-value support is experimental and must be enabled using the `kv` feature")
+ }
+
+ let mut builder = Record::builder();
+
+ builder
+ .args(args)
+ .level(level)
+ .target(target)
+ .module_path_static(Some(module_path))
+ .file_static(Some(loc.file()))
+ .line(Some(loc.line()));
+
+ #[cfg(feature = "kv")]
+ builder.key_values(&kvs);
+
+ crate::logger().log(&builder.build());
+}
+
+pub fn log<'a, K>(
+ args: Arguments,
+ level: Level,
+ target_module_path_and_loc: &(&str, &'static str, &'static Location),
+ kvs: K,
+) where
+ K: KVs<'a>,
+{
+ log_impl(args, level, target_module_path_and_loc, kvs.into_kvs())
+}
+
+pub fn enabled(level: Level, target: &str) -> bool {
+ crate::logger().enabled(&Metadata::builder().level(level).target(target).build())
+}
+
+#[track_caller]
+pub fn loc() -> &'static Location<'static> {
+ Location::caller()
+}
+
+#[cfg(feature = "kv")]
+mod kv_support {
+ use crate::kv;
+
+ pub type Value<'a> = kv::Value<'a>;
+
+ // NOTE: Many functions here accept a double reference &&V
+ // This is so V itself can be ?Sized, while still letting us
+ // erase it to some dyn Trait (because &T is sized)
+
+ pub fn capture_to_value<'a, V: kv::ToValue + ?Sized>(v: &'a &'a V) -> Value<'a> {
+ v.to_value()
+ }
+
+ pub fn capture_debug<'a, V: core::fmt::Debug + ?Sized>(v: &'a &'a V) -> Value<'a> {
+ Value::from_debug(v)
+ }
+
+ pub fn capture_display<'a, V: core::fmt::Display + ?Sized>(v: &'a &'a V) -> Value<'a> {
+ Value::from_display(v)
+ }
+
+ #[cfg(feature = "kv_std")]
+ pub fn capture_error<'a>(v: &'a (dyn std::error::Error + 'static)) -> Value<'a> {
+ Value::from_dyn_error(v)
+ }
+
+ #[cfg(feature = "kv_sval")]
+ pub fn capture_sval<'a, V: sval::Value + ?Sized>(v: &'a &'a V) -> Value<'a> {
+ Value::from_sval(v)
+ }
+
+ #[cfg(feature = "kv_serde")]
+ pub fn capture_serde<'a, V: serde::Serialize + ?Sized>(v: &'a &'a V) -> Value<'a> {
+ Value::from_serde(v)
+ }
+}
+
+#[cfg(feature = "kv")]
+pub use self::kv_support::*;
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/error.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/error.rs
new file mode 100644
index 0000000..7efa5af
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/error.rs
@@ -0,0 +1,94 @@
+use std::fmt;
+
+/// An error encountered while working with structured data.
+#[derive(Debug)]
+pub struct Error {
+ inner: Inner,
+}
+
+#[derive(Debug)]
+enum Inner {
+ #[cfg(feature = "std")]
+ Boxed(std_support::BoxedError),
+ Msg(&'static str),
+ #[cfg(feature = "value-bag")]
+ Value(crate::kv::value::inner::Error),
+ Fmt,
+}
+
+impl Error {
+ /// Create an error from a message.
+ pub fn msg(msg: &'static str) -> Self {
+ Error {
+ inner: Inner::Msg(msg),
+ }
+ }
+
+ // Not public so we don't leak the `crate::kv::value::inner` API
+ #[cfg(feature = "value-bag")]
+ pub(super) fn from_value(err: crate::kv::value::inner::Error) -> Self {
+ Error {
+ inner: Inner::Value(err),
+ }
+ }
+
+ // Not public so we don't leak the `crate::kv::value::inner` API
+ #[cfg(feature = "value-bag")]
+ pub(super) fn into_value(self) -> crate::kv::value::inner::Error {
+ match self.inner {
+ Inner::Value(err) => err,
+ #[cfg(feature = "kv_std")]
+ _ => crate::kv::value::inner::Error::boxed(self),
+ #[cfg(not(feature = "kv_std"))]
+ _ => crate::kv::value::inner::Error::msg("error inspecting a value"),
+ }
+ }
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use self::Inner::*;
+ match &self.inner {
+ #[cfg(feature = "std")]
+ Boxed(err) => err.fmt(f),
+ #[cfg(feature = "value-bag")]
+ Value(err) => err.fmt(f),
+ Msg(msg) => msg.fmt(f),
+ Fmt => fmt::Error.fmt(f),
+ }
+ }
+}
+
+impl From<fmt::Error> for Error {
+ fn from(_: fmt::Error) -> Self {
+ Error { inner: Inner::Fmt }
+ }
+}
+
+#[cfg(feature = "std")]
+mod std_support {
+ use super::*;
+ use std::{error, io};
+
+ pub(super) type BoxedError = Box<dyn error::Error + Send + Sync>;
+
+ impl Error {
+ /// Create an error from a standard error type.
+ pub fn boxed<E>(err: E) -> Self
+ where
+ E: Into<BoxedError>,
+ {
+ Error {
+ inner: Inner::Boxed(err.into()),
+ }
+ }
+ }
+
+ impl error::Error for Error {}
+
+ impl From<io::Error> for Error {
+ fn from(err: io::Error) -> Self {
+ Error::boxed(err)
+ }
+ }
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/key.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/key.rs
new file mode 100644
index 0000000..9a64b95
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/key.rs
@@ -0,0 +1,143 @@
+//! Structured keys.
+
+use std::borrow::Borrow;
+use std::fmt;
+
+/// A type that can be converted into a [`Key`](struct.Key.html).
+pub trait ToKey {
+ /// Perform the conversion.
+ fn to_key(&self) -> Key;
+}
+
+impl<'a, T> ToKey for &'a T
+where
+ T: ToKey + ?Sized,
+{
+ fn to_key(&self) -> Key {
+ (**self).to_key()
+ }
+}
+
+impl<'k> ToKey for Key<'k> {
+ fn to_key(&self) -> Key {
+ Key { key: self.key }
+ }
+}
+
+impl ToKey for str {
+ fn to_key(&self) -> Key {
+ Key::from_str(self)
+ }
+}
+
+/// A key in a key-value.
+// These impls must only be based on the as_str() representation of the key.
+// If a new field (such as an optional index) is added to the key, it must not affect comparison.
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Key<'k> {
+ key: &'k str,
+}
+
+impl<'k> Key<'k> {
+ /// Get a key from a borrowed string.
+ pub fn from_str(key: &'k str) -> Self {
+ Key { key }
+ }
+
+ /// Get a borrowed string from this key.
+ pub fn as_str(&self) -> &str {
+ self.key
+ }
+}
+
+impl<'k> fmt::Display for Key<'k> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.key.fmt(f)
+ }
+}
+
+impl<'k> AsRef<str> for Key<'k> {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'k> Borrow<str> for Key<'k> {
+ fn borrow(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl<'k> From<&'k str> for Key<'k> {
+ fn from(s: &'k str) -> Self {
+ Key::from_str(s)
+ }
+}
+
+#[cfg(feature = "std")]
+mod std_support {
+ use super::*;
+
+ use std::borrow::Cow;
+
+ impl ToKey for String {
+ fn to_key(&self) -> Key {
+ Key::from_str(self)
+ }
+ }
+
+ impl<'a> ToKey for Cow<'a, str> {
+ fn to_key(&self) -> Key {
+ Key::from_str(self)
+ }
+ }
+}
+
+#[cfg(feature = "kv_sval")]
+mod sval_support {
+ use super::*;
+
+ use sval::Value;
+ use sval_ref::ValueRef;
+
+ impl<'a> Value for Key<'a> {
+ fn stream<'sval, S: sval::Stream<'sval> + ?Sized>(
+ &'sval self,
+ stream: &mut S,
+ ) -> sval::Result {
+ self.key.stream(stream)
+ }
+ }
+
+ impl<'a> ValueRef<'a> for Key<'a> {
+ fn stream_ref<S: sval::Stream<'a> + ?Sized>(&self, stream: &mut S) -> sval::Result {
+ self.key.stream(stream)
+ }
+ }
+}
+
+#[cfg(feature = "kv_serde")]
+mod serde_support {
+ use super::*;
+
+ use serde::{Serialize, Serializer};
+
+ impl<'a> Serialize for Key<'a> {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ self.key.serialize(serializer)
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn key_from_string() {
+ assert_eq!("a key", Key::from_str("a key").as_str());
+ }
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/mod.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/mod.rs
new file mode 100644
index 0000000..1ccb825
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/mod.rs
@@ -0,0 +1,265 @@
+//! Structured logging.
+//!
+//! Add the `kv` feature to your `Cargo.toml` to enable
+//! this module:
+//!
+//! ```toml
+//! [dependencies.log]
+//! features = ["kv"]
+//! ```
+//!
+//! # Structured logging in `log`
+//!
+//! Structured logging enhances traditional text-based log records with user-defined
+//! attributes. Structured logs can be analyzed using a variety of data processing
+//! techniques, without needing to find and parse attributes from unstructured text first.
+//!
+//! In `log`, user-defined attributes are part of a [`Source`] on the log record.
+//! Each attribute is a key-value; a pair of [`Key`] and [`Value`]. Keys are strings
+//! and values are a datum of any type that can be formatted or serialized. Simple types
+//! like strings, booleans, and numbers are supported, as well as arbitrarily complex
+//! structures involving nested objects and sequences.
+//!
+//! ## Adding key-values to log records
+//!
+//! Key-values appear before the message format in the `log!` macros:
+//!
+//! ```
+//! # use log::info;
+//! info!(a = 1; "Something of interest");
+//! ```
+//!
+//! Key-values support the same shorthand identifier syntax as `format_args`:
+//!
+//! ```
+//! # use log::info;
+//! let a = 1;
+//!
+//! info!(a; "Something of interest");
+//! ```
+//!
+//! Values are captured using the [`ToValue`] trait by default. To capture a value
+//! using a different trait implementation, use a modifier after its key. Here's how
+//! the same example can capture `a` using its `Debug` implementation instead:
+//!
+//! ```
+//! # use log::info;
+//! info!(a:? = 1; "Something of interest");
+//! ```
+//!
+//! The following capturing modifiers are supported:
+//!
+//! - `:?` will capture the value using `Debug`.
+//! - `:debug` will capture the value using `Debug`.
+//! - `:%` will capture the value using `Display`.
+//! - `:display` will capture the value using `Display`.
+//! - `:err` will capture the value using `std::error::Error` (requires the `kv_std` feature).
+//! - `:sval` will capture the value using `sval::Value` (requires the `kv_sval` feature).
+//! - `:serde` will capture the value using `serde::Serialize` (requires the `kv_serde` feature).
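+//!
+//! For example, a hypothetical `path` captured through `Display` and a
+//! hypothetical `source` error captured through `Debug`:
+//!
+//! ```
+//! # use log::warn;
+//! let path = "./logs/app.log";
+//! let source = std::io::Error::new(std::io::ErrorKind::NotFound, "not found");
+//!
+//! warn!(path:% = path, source:? = source; "Failed to open log file");
+//! ```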
+//!
+//! ## Working with key-values on log records
+//!
+//! Use the [`Record::key_values`](../struct.Record.html#method.key_values) method to access key-values.
+//!
+//! Individual values can be pulled from the source by their key:
+//!
+//! ```
+//! # fn main() -> Result<(), log::kv::Error> {
+//! use log::kv::{Source, Key, Value};
+//! # let record = log::Record::builder().key_values(&[("a", 1)]).build();
+//!
+//! // info!(a = 1; "Something of interest");
+//!
+//! let a: Value = record.key_values().get(Key::from("a")).unwrap();
+//! assert_eq!(1, a.to_i64().unwrap());
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! All key-values can also be enumerated using a [`VisitSource`]:
+//!
+//! ```
+//! # fn main() -> Result<(), log::kv::Error> {
+//! use std::collections::BTreeMap;
+//!
+//! use log::kv::{self, Source, Key, Value, VisitSource};
+//!
+//! struct Collect<'kvs>(BTreeMap<Key<'kvs>, Value<'kvs>>);
+//!
+//! impl<'kvs> VisitSource<'kvs> for Collect<'kvs> {
+//! fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), kv::Error> {
+//! self.0.insert(key, value);
+//!
+//! Ok(())
+//! }
+//! }
+//!
+//! let mut visitor = Collect(BTreeMap::new());
+//!
+//! # let record = log::Record::builder().key_values(&[("a", 1), ("b", 2), ("c", 3)]).build();
+//! // info!(a = 1, b = 2, c = 3; "Something of interest");
+//!
+//! record.key_values().visit(&mut visitor)?;
+//!
+//! let collected = visitor.0;
+//!
+//! assert_eq!(
+//! vec!["a", "b", "c"],
+//! collected
+//! .keys()
+//! .map(|k| k.as_str())
+//! .collect::<Vec<_>>(),
+//! );
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! [`Value`]s have methods for conversions to common types:
+//!
+//! ```
+//! # fn main() -> Result<(), log::kv::Error> {
+//! use log::kv::{Source, Key};
+//! # let record = log::Record::builder().key_values(&[("a", 1)]).build();
+//!
+//! // info!(a = 1; "Something of interest");
+//!
+//! let a = record.key_values().get(Key::from("a")).unwrap();
+//!
+//! assert_eq!(1, a.to_i64().unwrap());
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! Values also have their own [`VisitValue`] type. Value visitors are a lightweight
+//! API for working with primitive types:
+//!
+//! ```
+//! # fn main() -> Result<(), log::kv::Error> {
+//! use log::kv::{self, Source, Key, VisitValue};
+//! # let record = log::Record::builder().key_values(&[("a", 1)]).build();
+//!
+//! struct IsNumeric(bool);
+//!
+//! impl<'kvs> VisitValue<'kvs> for IsNumeric {
+//! fn visit_any(&mut self, _value: kv::Value) -> Result<(), kv::Error> {
+//! self.0 = false;
+//! Ok(())
+//! }
+//!
+//! fn visit_u64(&mut self, _value: u64) -> Result<(), kv::Error> {
+//! self.0 = true;
+//! Ok(())
+//! }
+//!
+//! fn visit_i64(&mut self, _value: i64) -> Result<(), kv::Error> {
+//! self.0 = true;
+//! Ok(())
+//! }
+//!
+//! fn visit_u128(&mut self, _value: u128) -> Result<(), kv::Error> {
+//! self.0 = true;
+//! Ok(())
+//! }
+//!
+//! fn visit_i128(&mut self, _value: i128) -> Result<(), kv::Error> {
+//! self.0 = true;
+//! Ok(())
+//! }
+//!
+//! fn visit_f64(&mut self, _value: f64) -> Result<(), kv::Error> {
+//! self.0 = true;
+//! Ok(())
+//! }
+//! }
+//!
+//! // info!(a = 1; "Something of interest");
+//!
+//! let a = record.key_values().get(Key::from("a")).unwrap();
+//!
+//! let mut visitor = IsNumeric(false);
+//!
+//! a.visit(&mut visitor)?;
+//!
+//! let is_numeric = visitor.0;
+//!
+//! assert!(is_numeric);
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! To serialize a value to a format like JSON, you can also use either `serde` or `sval`:
+//!
+//! ```
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! # #[cfg(feature = "serde")]
+//! # {
+//! # use log::kv::Key;
+//! #[derive(serde::Serialize)]
+//! struct Data {
+//! a: i32, b: bool,
+//! c: &'static str,
+//! }
+//!
+//! let data = Data { a: 1, b: true, c: "Some data" };
+//!
+//! # let source = [("a", log::kv::Value::from_serde(&data))];
+//! # let record = log::Record::builder().key_values(&source).build();
+//! // info!(a = data; "Something of interest");
+//!
+//! let a = record.key_values().get(Key::from("a")).unwrap();
+//!
+//! assert_eq!("{\"a\":1,\"b\":true,\"c\":\"Some data\"}", serde_json::to_string(&a)?);
+//! # }
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! The choice of serialization framework depends on the needs of the consumer.
+//! If you're in a no-std environment, you can use `sval`. In other cases, you can use `serde`.
+//! Log producers and log consumers don't need to agree on the serialization framework.
+//! A value can be captured using its `serde::Serialize` implementation and still be serialized
+//! through `sval` without losing any structure or data.
+//!
+//! Values can also always be formatted using the standard `Debug` and `Display`
+//! traits:
+//!
+//! ```
+//! # use log::kv::Key;
+//! # #[derive(Debug)]
+//! struct Data {
+//! a: i32,
+//! b: bool,
+//! c: &'static str,
+//! }
+//!
+//! let data = Data { a: 1, b: true, c: "Some data" };
+//!
+//! # let source = [("a", log::kv::Value::from_debug(&data))];
+//! # let record = log::Record::builder().key_values(&source).build();
+//! // info!(a = data; "Something of interest");
+//!
+//! let a = record.key_values().get(Key::from("a")).unwrap();
+//!
+//! assert_eq!("Data { a: 1, b: true, c: \"Some data\" }", format!("{a:?}"));
+//! ```
+
+mod error;
+mod key;
+
+#[cfg(not(feature = "kv_unstable"))]
+mod source;
+#[cfg(not(feature = "kv_unstable"))]
+mod value;
+
+pub use self::error::Error;
+pub use self::key::{Key, ToKey};
+pub use self::source::{Source, VisitSource};
+pub use self::value::{ToValue, Value, VisitValue};
+
+#[cfg(feature = "kv_unstable")]
+pub mod source;
+#[cfg(feature = "kv_unstable")]
+pub mod value;
+
+#[cfg(feature = "kv_unstable")]
+pub use self::source::Visitor;
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/source.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/source.rs
new file mode 100644
index 0000000..f463e6d
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/source.rs
@@ -0,0 +1,514 @@
+//! Sources for key-values.
+//!
+//! This module defines the [`Source`] type and supporting APIs for
+//! working with collections of key-values.
+
+use crate::kv::{Error, Key, ToKey, ToValue, Value};
+use std::fmt;
+
+/// A source of key-values.
+///
+/// The source may be a single pair, a set of pairs, or a filter over a set of pairs.
+/// Use the [`VisitSource`](trait.VisitSource.html) trait to inspect the structured data
+/// in a source.
+///
+/// A source is like an iterator over its key-values, except with a push-based API
+/// instead of a pull-based one.
+///
+/// # Examples
+///
+/// Enumerating the key-values in a source:
+///
+/// ```
+/// # fn main() -> Result<(), log::kv::Error> {
+/// use log::kv::{self, Source, Key, Value, VisitSource};
+///
+/// // A `VisitSource` that prints all key-values
+/// // VisitSources are fed each key-value pair in the sources they visit
+/// struct Printer;
+///
+/// impl<'kvs> VisitSource<'kvs> for Printer {
+/// fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), kv::Error> {
+/// println!("{key}: {value}");
+///
+/// Ok(())
+/// }
+/// }
+///
+/// // A source with 3 key-values
+/// // Common collection types implement the `Source` trait
+/// let source = &[
+/// ("a", 1),
+/// ("b", 2),
+/// ("c", 3),
+/// ];
+///
+/// // Pass an instance of the `VisitSource` to a `Source` to visit it
+/// source.visit(&mut Printer)?;
+/// # Ok(())
+/// # }
+/// ```
+pub trait Source {
+ /// Visit key-values.
+ ///
+ /// A source doesn't have to guarantee any ordering or uniqueness of key-values.
+ /// If the given visitor returns an error then the source may early-return with it,
+ /// even if there are more key-values.
+ ///
+ /// # Implementation notes
+ ///
+ /// A source should yield the same key-values to a subsequent visitor unless
+ /// that visitor itself fails.
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error>;
+
+ /// Get the value for a given key.
+ ///
+    /// If the key appears multiple times in the source then which value is returned
+ /// is implementation specific.
+ ///
+ /// # Implementation notes
+ ///
+ /// A source that can provide a more efficient implementation of this method
+ /// should override it.
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ get_default(self, key)
+ }
+
+ /// Count the number of key-values that can be visited.
+ ///
+ /// # Implementation notes
+ ///
+ /// A source that knows the number of key-values upfront may provide a more
+ /// efficient implementation.
+ ///
+ /// A subsequent call to `visit` should yield the same number of key-values.
+ fn count(&self) -> usize {
+ count_default(self)
+ }
+}
+
+/// The default implementation of `Source::get`
+fn get_default<'v>(source: &'v (impl Source + ?Sized), key: Key) -> Option<Value<'v>> {
+ struct Get<'k, 'v> {
+ key: Key<'k>,
+ found: Option<Value<'v>>,
+ }
+
+ impl<'k, 'kvs> VisitSource<'kvs> for Get<'k, 'kvs> {
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ if self.key == key {
+ self.found = Some(value);
+ }
+
+ Ok(())
+ }
+ }
+
+ let mut get = Get { key, found: None };
+
+ let _ = source.visit(&mut get);
+ get.found
+}
+
+/// The default implementation of `Source::count`.
+fn count_default(source: impl Source) -> usize {
+ struct Count(usize);
+
+ impl<'kvs> VisitSource<'kvs> for Count {
+ fn visit_pair(&mut self, _: Key<'kvs>, _: Value<'kvs>) -> Result<(), Error> {
+ self.0 += 1;
+
+ Ok(())
+ }
+ }
+
+ let mut count = Count(0);
+ let _ = source.visit(&mut count);
+ count.0
+}
+
+impl<'a, T> Source for &'a T
+where
+ T: Source + ?Sized,
+{
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ Source::visit(&**self, visitor)
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ Source::get(&**self, key)
+ }
+
+ fn count(&self) -> usize {
+ Source::count(&**self)
+ }
+}
+
+impl<K, V> Source for (K, V)
+where
+ K: ToKey,
+ V: ToValue,
+{
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ visitor.visit_pair(self.0.to_key(), self.1.to_value())
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ if self.0.to_key() == key {
+ Some(self.1.to_value())
+ } else {
+ None
+ }
+ }
+
+ fn count(&self) -> usize {
+ 1
+ }
+}
+
+impl<S> Source for [S]
+where
+ S: Source,
+{
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ for source in self {
+ source.visit(visitor)?;
+ }
+
+ Ok(())
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ for source in self {
+ if let Some(found) = source.get(key.clone()) {
+ return Some(found);
+ }
+ }
+
+ None
+ }
+
+ fn count(&self) -> usize {
+ self.iter().map(Source::count).sum()
+ }
+}
+
+impl<const N: usize, S> Source for [S; N]
+where
+ S: Source,
+{
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ Source::visit(self as &[_], visitor)
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ Source::get(self as &[_], key)
+ }
+
+ fn count(&self) -> usize {
+ Source::count(self as &[_])
+ }
+}
+
+impl<S> Source for Option<S>
+where
+ S: Source,
+{
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ if let Some(source) = self {
+ source.visit(visitor)?;
+ }
+
+ Ok(())
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ self.as_ref().and_then(|s| s.get(key))
+ }
+
+ fn count(&self) -> usize {
+ self.as_ref().map_or(0, Source::count)
+ }
+}
+
+/// A visitor for the key-value pairs in a [`Source`](trait.Source.html).
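+///
+/// # Examples
+///
+/// A sketch of using the `std::fmt` integration below to render any source as
+/// a debug map (the `AsMap` wrapper is illustrative, not part of this crate):
+///
+/// ```
+/// use log::kv::Source;
+/// use std::fmt;
+///
+/// struct AsMap<S>(S);
+///
+/// impl<S: Source> fmt::Debug for AsMap<S> {
+///     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+///         let mut map = f.debug_map();
+///
+///         // `fmt::DebugMap` implements `VisitSource`, so the source can feed it directly
+///         self.0.visit(&mut map).map_err(|_| fmt::Error)?;
+///
+///         map.finish()
+///     }
+/// }
+///
+/// // Prints each pair, e.g. `{"a": 1, "b": 2}`
+/// println!("{:?}", AsMap(&[("a", 1), ("b", 2)]));
+/// ```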
+pub trait VisitSource<'kvs> {
+ /// Visit a key-value pair.
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error>;
+}
+
+impl<'a, 'kvs, T> VisitSource<'kvs> for &'a mut T
+where
+ T: VisitSource<'kvs> + ?Sized,
+{
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ (**self).visit_pair(key, value)
+ }
+}
+
+impl<'a, 'b: 'a, 'kvs> VisitSource<'kvs> for fmt::DebugMap<'a, 'b> {
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ self.entry(&key, &value);
+ Ok(())
+ }
+}
+
+impl<'a, 'b: 'a, 'kvs> VisitSource<'kvs> for fmt::DebugList<'a, 'b> {
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ self.entry(&(key, value));
+ Ok(())
+ }
+}
+
+impl<'a, 'b: 'a, 'kvs> VisitSource<'kvs> for fmt::DebugSet<'a, 'b> {
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ self.entry(&(key, value));
+ Ok(())
+ }
+}
+
+impl<'a, 'b: 'a, 'kvs> VisitSource<'kvs> for fmt::DebugTuple<'a, 'b> {
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ self.field(&key);
+ self.field(&value);
+ Ok(())
+ }
+}
+
+#[cfg(feature = "std")]
+mod std_support {
+ use super::*;
+ use std::borrow::Borrow;
+ use std::collections::{BTreeMap, HashMap};
+ use std::hash::{BuildHasher, Hash};
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ impl<S> Source for Box<S>
+ where
+ S: Source + ?Sized,
+ {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ Source::visit(&**self, visitor)
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ Source::get(&**self, key)
+ }
+
+ fn count(&self) -> usize {
+ Source::count(&**self)
+ }
+ }
+
+ impl<S> Source for Arc<S>
+ where
+ S: Source + ?Sized,
+ {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ Source::visit(&**self, visitor)
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ Source::get(&**self, key)
+ }
+
+ fn count(&self) -> usize {
+ Source::count(&**self)
+ }
+ }
+
+ impl<S> Source for Rc<S>
+ where
+ S: Source + ?Sized,
+ {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ Source::visit(&**self, visitor)
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ Source::get(&**self, key)
+ }
+
+ fn count(&self) -> usize {
+ Source::count(&**self)
+ }
+ }
+
+ impl<S> Source for Vec<S>
+ where
+ S: Source,
+ {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ Source::visit(&**self, visitor)
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ Source::get(&**self, key)
+ }
+
+ fn count(&self) -> usize {
+ Source::count(&**self)
+ }
+ }
+
+ impl<'kvs, V> VisitSource<'kvs> for Box<V>
+ where
+ V: VisitSource<'kvs> + ?Sized,
+ {
+ fn visit_pair(&mut self, key: Key<'kvs>, value: Value<'kvs>) -> Result<(), Error> {
+ (**self).visit_pair(key, value)
+ }
+ }
+
+ impl<K, V, S> Source for HashMap<K, V, S>
+ where
+ K: ToKey + Borrow<str> + Eq + Hash,
+ V: ToValue,
+ S: BuildHasher,
+ {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ for (key, value) in self {
+ visitor.visit_pair(key.to_key(), value.to_value())?;
+ }
+ Ok(())
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ HashMap::get(self, key.as_str()).map(|v| v.to_value())
+ }
+
+ fn count(&self) -> usize {
+ self.len()
+ }
+ }
+
+ impl<K, V> Source for BTreeMap<K, V>
+ where
+ K: ToKey + Borrow<str> + Ord,
+ V: ToValue,
+ {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ for (key, value) in self {
+ visitor.visit_pair(key.to_key(), value.to_value())?;
+ }
+ Ok(())
+ }
+
+ fn get(&self, key: Key) -> Option<Value<'_>> {
+ BTreeMap::get(self, key.as_str()).map(|v| v.to_value())
+ }
+
+ fn count(&self) -> usize {
+ self.len()
+ }
+ }
+
+ #[cfg(test)]
+ mod tests {
+ use crate::kv::value;
+
+ use super::*;
+
+ #[test]
+ fn count() {
+ assert_eq!(1, Source::count(&Box::new(("a", 1))));
+ assert_eq!(2, Source::count(&vec![("a", 1), ("b", 2)]));
+ }
+
+ #[test]
+ fn get() {
+ let source = vec![("a", 1), ("b", 2), ("a", 1)];
+ assert_eq!(
+ value::inner::Token::I64(1),
+ Source::get(&source, Key::from_str("a")).unwrap().to_token()
+ );
+
+ let source = Box::new(None::<(&str, i32)>);
+ assert!(Source::get(&source, Key::from_str("a")).is_none());
+ }
+
+ #[test]
+ fn hash_map() {
+ let mut map = HashMap::new();
+ map.insert("a", 1);
+ map.insert("b", 2);
+
+ assert_eq!(2, Source::count(&map));
+ assert_eq!(
+ value::inner::Token::I64(1),
+ Source::get(&map, Key::from_str("a")).unwrap().to_token()
+ );
+ }
+
+ #[test]
+ fn btree_map() {
+ let mut map = BTreeMap::new();
+ map.insert("a", 1);
+ map.insert("b", 2);
+
+ assert_eq!(2, Source::count(&map));
+ assert_eq!(
+ value::inner::Token::I64(1),
+ Source::get(&map, Key::from_str("a")).unwrap().to_token()
+ );
+ }
+ }
+}
+
+// NOTE: Deprecated; but aliases can't carry this attribute
+#[cfg(feature = "kv_unstable")]
+pub use VisitSource as Visitor;
+
+#[cfg(test)]
+mod tests {
+ use crate::kv::value;
+
+ use super::*;
+
+ #[test]
+ fn source_is_object_safe() {
+ fn _check(_: &dyn Source) {}
+ }
+
+ #[test]
+ fn visitor_is_object_safe() {
+ fn _check(_: &dyn VisitSource) {}
+ }
+
+ #[test]
+ fn count() {
+ struct OnePair {
+ key: &'static str,
+ value: i32,
+ }
+
+ impl Source for OnePair {
+ fn visit<'kvs>(&'kvs self, visitor: &mut dyn VisitSource<'kvs>) -> Result<(), Error> {
+ visitor.visit_pair(self.key.to_key(), self.value.to_value())
+ }
+ }
+
+ assert_eq!(1, Source::count(&("a", 1)));
+ assert_eq!(2, Source::count(&[("a", 1), ("b", 2)] as &[_]));
+ assert_eq!(0, Source::count(&None::<(&str, i32)>));
+ assert_eq!(1, Source::count(&OnePair { key: "a", value: 1 }));
+ }
+
+ #[test]
+ fn get() {
+ let source = &[("a", 1), ("b", 2), ("a", 1)] as &[_];
+ assert_eq!(
+ value::inner::Token::I64(1),
+ Source::get(source, Key::from_str("a")).unwrap().to_token()
+ );
+ assert_eq!(
+ value::inner::Token::I64(2),
+ Source::get(source, Key::from_str("b")).unwrap().to_token()
+ );
+ assert!(Source::get(&source, Key::from_str("c")).is_none());
+
+ let source = None::<(&str, i32)>;
+ assert!(Source::get(&source, Key::from_str("a")).is_none());
+ }
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/value.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/value.rs
new file mode 100644
index 0000000..1511dd0
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/kv/value.rs
@@ -0,0 +1,1394 @@
+//! Structured values.
+//!
+//! This module defines the [`Value`] type and supporting APIs for
+//! capturing and serializing them.
+
+use std::fmt;
+
+pub use crate::kv::Error;
+
+/// A type that can be converted into a [`Value`](struct.Value.html).
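+///
+/// # Examples
+///
+/// A small sketch of capturing a borrowed string through its `ToValue` impl:
+///
+/// ```
+/// use log::kv::ToValue;
+///
+/// let value = "a string".to_value();
+///
+/// assert_eq!(Some("a string"), value.to_borrowed_str());
+/// ```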
+pub trait ToValue {
+ /// Perform the conversion.
+ fn to_value(&self) -> Value;
+}
+
+impl<'a, T> ToValue for &'a T
+where
+ T: ToValue + ?Sized,
+{
+ fn to_value(&self) -> Value {
+ (**self).to_value()
+ }
+}
+
+impl<'v> ToValue for Value<'v> {
+ fn to_value(&self) -> Value {
+ Value {
+ inner: self.inner.clone(),
+ }
+ }
+}
+
+/// A value in a key-value.
+///
+/// A value is an anonymous bag containing some structured datum.
+///
+/// # Capturing values
+///
+/// There are a few ways to capture a value:
+///
+/// - Using the `Value::from_*` methods.
+/// - Using the `ToValue` trait.
+/// - Using the standard `From` trait.
+///
+/// ## Using the `Value::from_*` methods
+///
+/// `Value` offers a few constructor methods that capture values of different kinds.
+///
+/// ```
+/// use log::kv::Value;
+///
+/// let value = Value::from_debug(&42i32);
+///
+/// assert_eq!(None, value.to_i64());
+/// ```
+///
+/// ## Using the `ToValue` trait
+///
+/// The `ToValue` trait can be used to capture values generically.
+/// It's the bound used by `Source`.
+///
+/// ```
+/// # use log::kv::ToValue;
+/// let value = 42i32.to_value();
+///
+/// assert_eq!(Some(42), value.to_i64());
+/// ```
+///
+/// ## Using the standard `From` trait
+///
+/// Standard types that implement `ToValue` also implement `From`.
+///
+/// ```
+/// use log::kv::Value;
+///
+/// let value = Value::from(42i32);
+///
+/// assert_eq!(Some(42), value.to_i64());
+/// ```
+///
+/// # Data model
+///
+/// Values can hold one of a number of types:
+///
+/// - **Null:** The absence of any other meaningful value. Note that
+/// `Some(Value::null())` is not the same as `None`. The former is
+/// `null` while the latter is `undefined`. This is important to be
+/// able to tell the difference between a key-value that was logged,
+/// but its value was empty (`Some(Value::null())`) and a key-value
+/// that was never logged at all (`None`).
+/// - **Strings:** `str`, `char`.
+/// - **Booleans:** `bool`.
+/// - **Integers:** `u8`-`u128`, `i8`-`i128`, `NonZero*`.
+/// - **Floating point numbers:** `f32`-`f64`.
+/// - **Errors:** `dyn (Error + 'static)`.
+/// - **`serde`:** Any type in `serde`'s data model.
+/// - **`sval`:** Any type in `sval`'s data model.
+///
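+/// For example, both `()` and an empty `Option` capture as the `null` value
+/// (a small sketch using this module's own `ToValue` impls):
+///
+/// ```
+/// use log::kv::ToValue;
+///
+/// assert_eq!("None", ().to_value().to_string());
+/// assert_eq!("None", None::<i32>.to_value().to_string());
+/// ```
+///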
+/// # Serialization
+///
+/// Values provide a number of ways to be serialized.
+///
+/// For basic types the [`Value::visit`] method can be used to extract the
+/// underlying typed value. However, this is limited to the set of types
+/// supported by the [`VisitValue`] trait methods.
+///
+/// For more complex types one of the following traits can be used:
+/// * `sval::Value`, requires the `kv_sval` feature.
+/// * `serde::Serialize`, requires the `kv_serde` feature.
+///
+/// You don't need a visitor to serialize values through `serde` or `sval`.
+///
+/// A value can always be serialized using any supported framework, regardless
+/// of how it was captured. If, for example, a value was captured using its
+/// `Display` implementation, it will serialize through `serde` as a string. If it was
+/// captured as a struct using `serde`, it will also serialize as a struct
+/// through `sval`, or can be formatted using a `Debug`-compatible representation.
+pub struct Value<'v> {
+ inner: inner::Inner<'v>,
+}
+
+impl<'v> Value<'v> {
+ /// Get a value from a type implementing `ToValue`.
+ pub fn from_any<T>(value: &'v T) -> Self
+ where
+ T: ToValue,
+ {
+ value.to_value()
+ }
+
+ /// Get a value from a type implementing `std::fmt::Debug`.
+ pub fn from_debug<T>(value: &'v T) -> Self
+ where
+ T: fmt::Debug,
+ {
+ Value {
+ inner: inner::Inner::from_debug(value),
+ }
+ }
+
+ /// Get a value from a type implementing `std::fmt::Display`.
+ pub fn from_display<T>(value: &'v T) -> Self
+ where
+ T: fmt::Display,
+ {
+ Value {
+ inner: inner::Inner::from_display(value),
+ }
+ }
+
+ /// Get a value from a type implementing `serde::Serialize`.
+ #[cfg(feature = "kv_serde")]
+ pub fn from_serde<T>(value: &'v T) -> Self
+ where
+ T: serde::Serialize,
+ {
+ Value {
+ inner: inner::Inner::from_serde1(value),
+ }
+ }
+
+ /// Get a value from a type implementing `sval::Value`.
+ #[cfg(feature = "kv_sval")]
+ pub fn from_sval<T>(value: &'v T) -> Self
+ where
+ T: sval::Value,
+ {
+ Value {
+ inner: inner::Inner::from_sval2(value),
+ }
+ }
+
+ /// Get a value from a dynamic `std::fmt::Debug`.
+ pub fn from_dyn_debug(value: &'v dyn fmt::Debug) -> Self {
+ Value {
+ inner: inner::Inner::from_dyn_debug(value),
+ }
+ }
+
+ /// Get a value from a dynamic `std::fmt::Display`.
+ pub fn from_dyn_display(value: &'v dyn fmt::Display) -> Self {
+ Value {
+ inner: inner::Inner::from_dyn_display(value),
+ }
+ }
+
+ /// Get a value from a dynamic error.
+ #[cfg(feature = "kv_std")]
+ pub fn from_dyn_error(err: &'v (dyn std::error::Error + 'static)) -> Self {
+ Value {
+ inner: inner::Inner::from_dyn_error(err),
+ }
+ }
+
+ /// Get a `null` value.
+ pub fn null() -> Self {
+ Value {
+ inner: inner::Inner::empty(),
+ }
+ }
+
+ /// Get a value from an internal primitive.
+ fn from_inner<T>(value: T) -> Self
+ where
+ T: Into<inner::Inner<'v>>,
+ {
+ Value {
+ inner: value.into(),
+ }
+ }
+
+ /// Inspect this value using a simple visitor.
+ ///
+ /// When the `kv_serde` or `kv_sval` features are enabled, you can also
+ /// serialize a value using its `Serialize` or `Value` implementation.
+ pub fn visit(&self, visitor: impl VisitValue<'v>) -> Result<(), Error> {
+ inner::visit(&self.inner, visitor)
+ }
+}
+
+impl<'v> fmt::Debug for Value<'v> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(&self.inner, f)
+ }
+}
+
+impl<'v> fmt::Display for Value<'v> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&self.inner, f)
+ }
+}
+
+#[cfg(feature = "kv_serde")]
+impl<'v> serde::Serialize for Value<'v> {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ self.inner.serialize(s)
+ }
+}
+
+#[cfg(feature = "kv_sval")]
+impl<'v> sval::Value for Value<'v> {
+ fn stream<'sval, S: sval::Stream<'sval> + ?Sized>(&'sval self, stream: &mut S) -> sval::Result {
+ sval::Value::stream(&self.inner, stream)
+ }
+}
+
+#[cfg(feature = "kv_sval")]
+impl<'v> sval_ref::ValueRef<'v> for Value<'v> {
+ fn stream_ref<S: sval::Stream<'v> + ?Sized>(&self, stream: &mut S) -> sval::Result {
+ sval_ref::ValueRef::stream_ref(&self.inner, stream)
+ }
+}
+
+impl ToValue for str {
+ fn to_value(&self) -> Value {
+ Value::from(self)
+ }
+}
+
+impl<'v> From<&'v str> for Value<'v> {
+ fn from(value: &'v str) -> Self {
+ Value::from_inner(value)
+ }
+}
+
+impl ToValue for () {
+ fn to_value(&self) -> Value {
+ Value::from_inner(())
+ }
+}
+
+impl<T> ToValue for Option<T>
+where
+ T: ToValue,
+{
+ fn to_value(&self) -> Value {
+ match *self {
+ Some(ref value) => value.to_value(),
+ None => Value::from_inner(()),
+ }
+ }
+}
+
+macro_rules! impl_to_value_primitive {
+ ($($into_ty:ty,)*) => {
+ $(
+ impl ToValue for $into_ty {
+ fn to_value(&self) -> Value {
+ Value::from(*self)
+ }
+ }
+
+ impl<'v> From<$into_ty> for Value<'v> {
+ fn from(value: $into_ty) -> Self {
+ Value::from_inner(value)
+ }
+ }
+
+ impl<'v> From<&'v $into_ty> for Value<'v> {
+ fn from(value: &'v $into_ty) -> Self {
+ Value::from_inner(*value)
+ }
+ }
+ )*
+ };
+}
+
+macro_rules! impl_to_value_nonzero_primitive {
+ ($($into_ty:ident,)*) => {
+ $(
+ impl ToValue for std::num::$into_ty {
+ fn to_value(&self) -> Value {
+ Value::from(self.get())
+ }
+ }
+
+ impl<'v> From<std::num::$into_ty> for Value<'v> {
+ fn from(value: std::num::$into_ty) -> Self {
+ Value::from(value.get())
+ }
+ }
+
+ impl<'v> From<&'v std::num::$into_ty> for Value<'v> {
+ fn from(value: &'v std::num::$into_ty) -> Self {
+ Value::from(value.get())
+ }
+ }
+ )*
+ };
+}
+
+macro_rules! impl_value_to_primitive {
+ ($(#[doc = $doc:tt] $into_name:ident -> $into_ty:ty,)*) => {
+ impl<'v> Value<'v> {
+ $(
+ #[doc = $doc]
+ pub fn $into_name(&self) -> Option<$into_ty> {
+ self.inner.$into_name()
+ }
+ )*
+ }
+ }
+}
+
+impl_to_value_primitive![
+ usize, u8, u16, u32, u64, u128, isize, i8, i16, i32, i64, i128, f32, f64, char, bool,
+];
+
+#[rustfmt::skip]
+impl_to_value_nonzero_primitive![
+ NonZeroUsize, NonZeroU8, NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU128,
+ NonZeroIsize, NonZeroI8, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI128,
+];
+
+impl_value_to_primitive![
+ #[doc = "Try convert this value into a `u64`."]
+ to_u64 -> u64,
+ #[doc = "Try convert this value into a `i64`."]
+ to_i64 -> i64,
+ #[doc = "Try convert this value into a `u128`."]
+ to_u128 -> u128,
+ #[doc = "Try convert this value into a `i128`."]
+ to_i128 -> i128,
+ #[doc = "Try convert this value into a `f64`."]
+ to_f64 -> f64,
+ #[doc = "Try convert this value into a `char`."]
+ to_char -> char,
+ #[doc = "Try convert this value into a `bool`."]
+ to_bool -> bool,
+];
+
+impl<'v> Value<'v> {
+ /// Try to convert this value into an error.
+ #[cfg(feature = "kv_std")]
+ pub fn to_borrowed_error(&self) -> Option<&(dyn std::error::Error + 'static)> {
+ self.inner.to_borrowed_error()
+ }
+
+ /// Try to convert this value into a borrowed string.
+ pub fn to_borrowed_str(&self) -> Option<&str> {
+ self.inner.to_borrowed_str()
+ }
+}
+
+#[cfg(feature = "kv_std")]
+mod std_support {
+ use std::borrow::Cow;
+ use std::rc::Rc;
+ use std::sync::Arc;
+
+ use super::*;
+
+ impl<T> ToValue for Box<T>
+ where
+ T: ToValue + ?Sized,
+ {
+ fn to_value(&self) -> Value {
+ (**self).to_value()
+ }
+ }
+
+ impl<T> ToValue for Arc<T>
+ where
+ T: ToValue + ?Sized,
+ {
+ fn to_value(&self) -> Value {
+ (**self).to_value()
+ }
+ }
+
+ impl<T> ToValue for Rc<T>
+ where
+ T: ToValue + ?Sized,
+ {
+ fn to_value(&self) -> Value {
+ (**self).to_value()
+ }
+ }
+
+ impl ToValue for String {
+ fn to_value(&self) -> Value {
+ Value::from(&**self)
+ }
+ }
+
+ impl<'v> ToValue for Cow<'v, str> {
+ fn to_value(&self) -> Value {
+ Value::from(&**self)
+ }
+ }
+
+ impl<'v> Value<'v> {
+ /// Try to convert this value into a string.
+ pub fn to_cow_str(&self) -> Option<Cow<'v, str>> {
+ self.inner.to_str()
+ }
+ }
+
+ impl<'v> From<&'v String> for Value<'v> {
+ fn from(v: &'v String) -> Self {
+ Value::from(&**v)
+ }
+ }
+}
+
+/// A visitor for a [`Value`].
+///
+/// Also see [`Value`'s documentation on serialization]. Value visitors are a simple alternative
+/// to a more fully-featured serialization framework like `serde` or `sval`. A value visitor
+/// can differentiate primitive types through methods like [`VisitValue::visit_bool`] and
+/// [`VisitValue::visit_str`], but more complex types like maps and sequences
+/// will fall through to [`VisitValue::visit_any`].
+///
+/// If you're trying to serialize a value to a format like JSON, you can use either `serde`
+/// or `sval` directly with the value. You don't need a visitor.
+///
+/// [`Value`'s documentation on serialization]: Value#serialization
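+///
+/// # Examples
+///
+/// A minimal sketch of a visitor that extracts an unsigned integer and ignores
+/// everything else (the `Extract` type is illustrative only):
+///
+/// ```
+/// # fn main() -> Result<(), log::kv::Error> {
+/// use log::kv::{Error, Value, VisitValue};
+///
+/// struct Extract(Option<u64>);
+///
+/// impl<'v> VisitValue<'v> for Extract {
+///     fn visit_any(&mut self, _value: Value) -> Result<(), Error> {
+///         // Anything that isn't an unsigned integer is ignored
+///         Ok(())
+///     }
+///
+///     fn visit_u64(&mut self, value: u64) -> Result<(), Error> {
+///         self.0 = Some(value);
+///
+///         Ok(())
+///     }
+/// }
+///
+/// let mut extract = Extract(None);
+/// Value::from(42u64).visit(&mut extract)?;
+///
+/// assert_eq!(Some(42), extract.0);
+/// # Ok(())
+/// # }
+/// ```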
+pub trait VisitValue<'v> {
+ /// Visit a `Value`.
+ ///
+ /// This is the only required method on `VisitValue` and acts as a fallback for any
+ /// more specific methods that aren't overridden.
+ /// The `Value` may be formatted using its `fmt::Debug` or `fmt::Display` implementation,
+ /// or serialized using its `sval::Value` or `serde::Serialize` implementation.
+ fn visit_any(&mut self, value: Value) -> Result<(), Error>;
+
+ /// Visit an empty value.
+ fn visit_null(&mut self) -> Result<(), Error> {
+ self.visit_any(Value::null())
+ }
+
+ /// Visit an unsigned integer.
+ fn visit_u64(&mut self, value: u64) -> Result<(), Error> {
+ self.visit_any(value.into())
+ }
+
+ /// Visit a signed integer.
+ fn visit_i64(&mut self, value: i64) -> Result<(), Error> {
+ self.visit_any(value.into())
+ }
+
+ /// Visit a big unsigned integer.
+ fn visit_u128(&mut self, value: u128) -> Result<(), Error> {
+ self.visit_any((value).into())
+ }
+
+ /// Visit a big signed integer.
+ fn visit_i128(&mut self, value: i128) -> Result<(), Error> {
+ self.visit_any((value).into())
+ }
+
+ /// Visit a floating point.
+ fn visit_f64(&mut self, value: f64) -> Result<(), Error> {
+ self.visit_any(value.into())
+ }
+
+ /// Visit a boolean.
+ fn visit_bool(&mut self, value: bool) -> Result<(), Error> {
+ self.visit_any(value.into())
+ }
+
+ /// Visit a string.
+ fn visit_str(&mut self, value: &str) -> Result<(), Error> {
+ self.visit_any(value.into())
+ }
+
+ /// Visit a string that is borrowed for the value's lifetime.
+ fn visit_borrowed_str(&mut self, value: &'v str) -> Result<(), Error> {
+ self.visit_str(value)
+ }
+
+ /// Visit a Unicode character.
+ fn visit_char(&mut self, value: char) -> Result<(), Error> {
+ let mut b = [0; 4];
+ self.visit_str(&*value.encode_utf8(&mut b))
+ }
+
+ /// Visit an error.
+ #[cfg(feature = "kv_std")]
+ fn visit_error(&mut self, err: &(dyn std::error::Error + 'static)) -> Result<(), Error> {
+ self.visit_any(Value::from_dyn_error(err))
+ }
+
+ /// Visit an error that is borrowed for the value's lifetime.
+ #[cfg(feature = "kv_std")]
+ fn visit_borrowed_error(
+ &mut self,
+ err: &'v (dyn std::error::Error + 'static),
+ ) -> Result<(), Error> {
+ self.visit_any(Value::from_dyn_error(err))
+ }
+}
+
+impl<'a, 'v, T: ?Sized> VisitValue<'v> for &'a mut T
+where
+ T: VisitValue<'v>,
+{
+ fn visit_any(&mut self, value: Value) -> Result<(), Error> {
+ (**self).visit_any(value)
+ }
+
+ fn visit_null(&mut self) -> Result<(), Error> {
+ (**self).visit_null()
+ }
+
+ fn visit_u64(&mut self, value: u64) -> Result<(), Error> {
+ (**self).visit_u64(value)
+ }
+
+ fn visit_i64(&mut self, value: i64) -> Result<(), Error> {
+ (**self).visit_i64(value)
+ }
+
+ fn visit_u128(&mut self, value: u128) -> Result<(), Error> {
+ (**self).visit_u128(value)
+ }
+
+ fn visit_i128(&mut self, value: i128) -> Result<(), Error> {
+ (**self).visit_i128(value)
+ }
+
+ fn visit_f64(&mut self, value: f64) -> Result<(), Error> {
+ (**self).visit_f64(value)
+ }
+
+ fn visit_bool(&mut self, value: bool) -> Result<(), Error> {
+ (**self).visit_bool(value)
+ }
+
+ fn visit_str(&mut self, value: &str) -> Result<(), Error> {
+ (**self).visit_str(value)
+ }
+
+ fn visit_borrowed_str(&mut self, value: &'v str) -> Result<(), Error> {
+ (**self).visit_borrowed_str(value)
+ }
+
+ fn visit_char(&mut self, value: char) -> Result<(), Error> {
+ (**self).visit_char(value)
+ }
+
+ #[cfg(feature = "kv_std")]
+ fn visit_error(&mut self, err: &(dyn std::error::Error + 'static)) -> Result<(), Error> {
+ (**self).visit_error(err)
+ }
+
+ #[cfg(feature = "kv_std")]
+ fn visit_borrowed_error(
+ &mut self,
+ err: &'v (dyn std::error::Error + 'static),
+ ) -> Result<(), Error> {
+ (**self).visit_borrowed_error(err)
+ }
+}
+
+#[cfg(feature = "value-bag")]
+pub(in crate::kv) mod inner {
+ /**
+ An implementation of `Value` based on a library called `value_bag`.
+
+ `value_bag` was written specifically to back `log`'s `Value` type, but was split out when it outgrew
+ the codebase here. It's a general-purpose type-erasure library that handles mapping between
+ more fully-featured serialization frameworks.
+ */
+ use super::*;
+
+ pub use value_bag::ValueBag as Inner;
+
+ pub use value_bag::Error;
+
+ #[cfg(test)]
+ pub use value_bag::test::TestToken as Token;
+
+ pub fn visit<'v>(
+ inner: &Inner<'v>,
+ visitor: impl VisitValue<'v>,
+ ) -> Result<(), crate::kv::Error> {
+ struct InnerVisitValue<V>(V);
+
+ impl<'v, V> value_bag::visit::Visit<'v> for InnerVisitValue<V>
+ where
+ V: VisitValue<'v>,
+ {
+ fn visit_any(&mut self, value: value_bag::ValueBag) -> Result<(), Error> {
+ self.0
+ .visit_any(Value { inner: value })
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_empty(&mut self) -> Result<(), Error> {
+ self.0.visit_null().map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_u64(&mut self, value: u64) -> Result<(), Error> {
+ self.0
+ .visit_u64(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_i64(&mut self, value: i64) -> Result<(), Error> {
+ self.0
+ .visit_i64(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_u128(&mut self, value: u128) -> Result<(), Error> {
+ self.0
+ .visit_u128(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_i128(&mut self, value: i128) -> Result<(), Error> {
+ self.0
+ .visit_i128(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_f64(&mut self, value: f64) -> Result<(), Error> {
+ self.0
+ .visit_f64(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_bool(&mut self, value: bool) -> Result<(), Error> {
+ self.0
+ .visit_bool(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_str(&mut self, value: &str) -> Result<(), Error> {
+ self.0
+ .visit_str(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_borrowed_str(&mut self, value: &'v str) -> Result<(), Error> {
+ self.0
+ .visit_borrowed_str(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ fn visit_char(&mut self, value: char) -> Result<(), Error> {
+ self.0
+ .visit_char(value)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ #[cfg(feature = "kv_std")]
+ fn visit_error(
+ &mut self,
+ err: &(dyn std::error::Error + 'static),
+ ) -> Result<(), Error> {
+ self.0
+ .visit_error(err)
+ .map_err(crate::kv::Error::into_value)
+ }
+
+ #[cfg(feature = "kv_std")]
+ fn visit_borrowed_error(
+ &mut self,
+ err: &'v (dyn std::error::Error + 'static),
+ ) -> Result<(), Error> {
+ self.0
+ .visit_borrowed_error(err)
+ .map_err(crate::kv::Error::into_value)
+ }
+ }
+
+ inner
+ .visit(&mut InnerVisitValue(visitor))
+ .map_err(crate::kv::Error::from_value)
+ }
+}
+
+#[cfg(not(feature = "value-bag"))]
+pub(in crate::kv) mod inner {
+ /**
+ This is a dependency-free implementation of `Value` for when there are no serialization frameworks involved.
+ In these simple cases a more fully featured solution like `value_bag` isn't needed, so we avoid pulling it in.
+
+ There are a few things here that need to remain consistent with the `value_bag`-based implementation:
+
+ 1. Conversions should always produce the same results. If a conversion here returns `Some`, then
+ the same `value_bag`-based conversion must also. Of particular note here are conversions from integers
+ to floats; they're based on the standard library's `TryInto` conversions, so an integer must first
+ convert to `i32` or `u32`, and only then to `f64`.
+ 2. VisitValues should always be called in the same way. If a particular type of value calls `visit_i64`,
+ then the same `value_bag`-based visitor must also.
+ */
+ use super::*;
+
+ #[derive(Clone)]
+ pub enum Inner<'v> {
+ None,
+ Bool(bool),
+ Str(&'v str),
+ Char(char),
+ I64(i64),
+ U64(u64),
+ F64(f64),
+ I128(i128),
+ U128(u128),
+ Debug(&'v dyn fmt::Debug),
+ Display(&'v dyn fmt::Display),
+ }
+
+ impl<'v> From<()> for Inner<'v> {
+ fn from(_: ()) -> Self {
+ Inner::None
+ }
+ }
+
+ impl<'v> From<bool> for Inner<'v> {
+ fn from(v: bool) -> Self {
+ Inner::Bool(v)
+ }
+ }
+
+ impl<'v> From<char> for Inner<'v> {
+ fn from(v: char) -> Self {
+ Inner::Char(v)
+ }
+ }
+
+ impl<'v> From<f32> for Inner<'v> {
+ fn from(v: f32) -> Self {
+ Inner::F64(v as f64)
+ }
+ }
+
+ impl<'v> From<f64> for Inner<'v> {
+ fn from(v: f64) -> Self {
+ Inner::F64(v)
+ }
+ }
+
+ impl<'v> From<i8> for Inner<'v> {
+ fn from(v: i8) -> Self {
+ Inner::I64(v as i64)
+ }
+ }
+
+ impl<'v> From<i16> for Inner<'v> {
+ fn from(v: i16) -> Self {
+ Inner::I64(v as i64)
+ }
+ }
+
+ impl<'v> From<i32> for Inner<'v> {
+ fn from(v: i32) -> Self {
+ Inner::I64(v as i64)
+ }
+ }
+
+ impl<'v> From<i64> for Inner<'v> {
+ fn from(v: i64) -> Self {
+ Inner::I64(v as i64)
+ }
+ }
+
+ impl<'v> From<isize> for Inner<'v> {
+ fn from(v: isize) -> Self {
+ Inner::I64(v as i64)
+ }
+ }
+
+ impl<'v> From<u8> for Inner<'v> {
+ fn from(v: u8) -> Self {
+ Inner::U64(v as u64)
+ }
+ }
+
+ impl<'v> From<u16> for Inner<'v> {
+ fn from(v: u16) -> Self {
+ Inner::U64(v as u64)
+ }
+ }
+
+ impl<'v> From<u32> for Inner<'v> {
+ fn from(v: u32) -> Self {
+ Inner::U64(v as u64)
+ }
+ }
+
+ impl<'v> From<u64> for Inner<'v> {
+ fn from(v: u64) -> Self {
+ Inner::U64(v as u64)
+ }
+ }
+
+ impl<'v> From<usize> for Inner<'v> {
+ fn from(v: usize) -> Self {
+ Inner::U64(v as u64)
+ }
+ }
+
+ impl<'v> From<i128> for Inner<'v> {
+ fn from(v: i128) -> Self {
+ Inner::I128(v)
+ }
+ }
+
+ impl<'v> From<u128> for Inner<'v> {
+ fn from(v: u128) -> Self {
+ Inner::U128(v)
+ }
+ }
+
+ impl<'v> From<&'v str> for Inner<'v> {
+ fn from(v: &'v str) -> Self {
+ Inner::Str(v)
+ }
+ }
+
+ impl<'v> fmt::Debug for Inner<'v> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Inner::None => fmt::Debug::fmt(&None::<()>, f),
+ Inner::Bool(v) => fmt::Debug::fmt(v, f),
+ Inner::Str(v) => fmt::Debug::fmt(v, f),
+ Inner::Char(v) => fmt::Debug::fmt(v, f),
+ Inner::I64(v) => fmt::Debug::fmt(v, f),
+ Inner::U64(v) => fmt::Debug::fmt(v, f),
+ Inner::F64(v) => fmt::Debug::fmt(v, f),
+ Inner::I128(v) => fmt::Debug::fmt(v, f),
+ Inner::U128(v) => fmt::Debug::fmt(v, f),
+ Inner::Debug(v) => fmt::Debug::fmt(v, f),
+ Inner::Display(v) => fmt::Display::fmt(v, f),
+ }
+ }
+ }
+
+ impl<'v> fmt::Display for Inner<'v> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Inner::None => fmt::Debug::fmt(&None::<()>, f),
+ Inner::Bool(v) => fmt::Display::fmt(v, f),
+ Inner::Str(v) => fmt::Display::fmt(v, f),
+ Inner::Char(v) => fmt::Display::fmt(v, f),
+ Inner::I64(v) => fmt::Display::fmt(v, f),
+ Inner::U64(v) => fmt::Display::fmt(v, f),
+ Inner::F64(v) => fmt::Display::fmt(v, f),
+ Inner::I128(v) => fmt::Display::fmt(v, f),
+ Inner::U128(v) => fmt::Display::fmt(v, f),
+ Inner::Debug(v) => fmt::Debug::fmt(v, f),
+ Inner::Display(v) => fmt::Display::fmt(v, f),
+ }
+ }
+ }
+
+ impl<'v> Inner<'v> {
+ pub fn from_debug<T: fmt::Debug>(value: &'v T) -> Self {
+ Inner::Debug(value)
+ }
+
+ pub fn from_display<T: fmt::Display>(value: &'v T) -> Self {
+ Inner::Display(value)
+ }
+
+ pub fn from_dyn_debug(value: &'v dyn fmt::Debug) -> Self {
+ Inner::Debug(value)
+ }
+
+ pub fn from_dyn_display(value: &'v dyn fmt::Display) -> Self {
+ Inner::Display(value)
+ }
+
+ pub fn empty() -> Self {
+ Inner::None
+ }
+
+ pub fn to_bool(&self) -> Option<bool> {
+ match self {
+ Inner::Bool(v) => Some(*v),
+ _ => None,
+ }
+ }
+
+ pub fn to_char(&self) -> Option<char> {
+ match self {
+ Inner::Char(v) => Some(*v),
+ _ => None,
+ }
+ }
+
+ pub fn to_f64(&self) -> Option<f64> {
+ match self {
+ Inner::F64(v) => Some(*v),
+ Inner::I64(v) => {
+ let v: i32 = (*v).try_into().ok()?;
+ v.try_into().ok()
+ }
+ Inner::U64(v) => {
+ let v: u32 = (*v).try_into().ok()?;
+ v.try_into().ok()
+ }
+ Inner::I128(v) => {
+ let v: i32 = (*v).try_into().ok()?;
+ v.try_into().ok()
+ }
+ Inner::U128(v) => {
+ let v: u32 = (*v).try_into().ok()?;
+ v.try_into().ok()
+ }
+ _ => None,
+ }
+ }
+
+ pub fn to_i64(&self) -> Option<i64> {
+ match self {
+ Inner::I64(v) => Some(*v),
+ Inner::U64(v) => (*v).try_into().ok(),
+ Inner::I128(v) => (*v).try_into().ok(),
+ Inner::U128(v) => (*v).try_into().ok(),
+ _ => None,
+ }
+ }
+
+ pub fn to_u64(&self) -> Option<u64> {
+ match self {
+ Inner::U64(v) => Some(*v),
+ Inner::I64(v) => (*v).try_into().ok(),
+ Inner::I128(v) => (*v).try_into().ok(),
+ Inner::U128(v) => (*v).try_into().ok(),
+ _ => None,
+ }
+ }
+
+ pub fn to_u128(&self) -> Option<u128> {
+ match self {
+ Inner::U128(v) => Some(*v),
+ Inner::I64(v) => (*v).try_into().ok(),
+ Inner::U64(v) => (*v).try_into().ok(),
+ Inner::I128(v) => (*v).try_into().ok(),
+ _ => None,
+ }
+ }
+
+ pub fn to_i128(&self) -> Option<i128> {
+ match self {
+ Inner::I128(v) => Some(*v),
+ Inner::I64(v) => (*v).try_into().ok(),
+ Inner::U64(v) => (*v).try_into().ok(),
+ Inner::U128(v) => (*v).try_into().ok(),
+ _ => None,
+ }
+ }
+
+ pub fn to_borrowed_str(&self) -> Option<&'v str> {
+ match self {
+ Inner::Str(v) => Some(v),
+ _ => None,
+ }
+ }
+
+ #[cfg(test)]
+ pub fn to_test_token(&self) -> Token {
+ match self {
+ Inner::None => Token::None,
+ Inner::Bool(v) => Token::Bool(*v),
+ Inner::Str(v) => Token::Str(*v),
+ Inner::Char(v) => Token::Char(*v),
+ Inner::I64(v) => Token::I64(*v),
+ Inner::U64(v) => Token::U64(*v),
+ Inner::F64(v) => Token::F64(*v),
+ Inner::I128(_) => unimplemented!(),
+ Inner::U128(_) => unimplemented!(),
+ Inner::Debug(_) => unimplemented!(),
+ Inner::Display(_) => unimplemented!(),
+ }
+ }
+ }
+
+ #[cfg(test)]
+ #[derive(Debug, PartialEq)]
+ pub enum Token<'v> {
+ None,
+ Bool(bool),
+ Char(char),
+ Str(&'v str),
+ F64(f64),
+ I64(i64),
+ U64(u64),
+ }
+
+ pub fn visit<'v>(
+ inner: &Inner<'v>,
+ mut visitor: impl VisitValue<'v>,
+ ) -> Result<(), crate::kv::Error> {
+ match inner {
+ Inner::None => visitor.visit_null(),
+ Inner::Bool(v) => visitor.visit_bool(*v),
+ Inner::Str(v) => visitor.visit_borrowed_str(*v),
+ Inner::Char(v) => visitor.visit_char(*v),
+ Inner::I64(v) => visitor.visit_i64(*v),
+ Inner::U64(v) => visitor.visit_u64(*v),
+ Inner::F64(v) => visitor.visit_f64(*v),
+ Inner::I128(v) => visitor.visit_i128(*v),
+ Inner::U128(v) => visitor.visit_u128(*v),
+ Inner::Debug(v) => visitor.visit_any(Value::from_dyn_debug(*v)),
+ Inner::Display(v) => visitor.visit_any(Value::from_dyn_display(*v)),
+ }
+ }
+}
+
+impl<'v> Value<'v> {
+ /// Get a value from a type implementing `std::fmt::Debug`.
+ #[cfg(feature = "kv_unstable")]
+ #[deprecated(note = "use `from_debug` instead")]
+ pub fn capture_debug<T>(value: &'v T) -> Self
+ where
+ T: fmt::Debug + 'static,
+ {
+ Value::from_debug(value)
+ }
+
+ /// Get a value from a type implementing `std::fmt::Display`.
+ #[cfg(feature = "kv_unstable")]
+ #[deprecated(note = "use `from_display` instead")]
+ pub fn capture_display<T>(value: &'v T) -> Self
+ where
+ T: fmt::Display + 'static,
+ {
+ Value::from_display(value)
+ }
+
+ /// Get a value from an error.
+ #[cfg(feature = "kv_unstable_std")]
+ #[deprecated(note = "use `from_dyn_error` instead")]
+ pub fn capture_error<T>(err: &'v T) -> Self
+ where
+ T: std::error::Error + 'static,
+ {
+ Value::from_dyn_error(err)
+ }
+
+ /// Get a value from a type implementing `serde::Serialize`.
+ #[cfg(feature = "kv_unstable_serde")]
+ #[deprecated(note = "use `from_serde` instead")]
+ pub fn capture_serde<T>(value: &'v T) -> Self
+ where
+ T: serde::Serialize + 'static,
+ {
+ Value::from_serde(value)
+ }
+
+ /// Get a value from a type implementing `sval::Value`.
+ #[cfg(feature = "kv_unstable_sval")]
+ #[deprecated(note = "use `from_sval` instead")]
+ pub fn capture_sval<T>(value: &'v T) -> Self
+ where
+ T: sval::Value + 'static,
+ {
+ Value::from_sval(value)
+ }
+
+ /// Check whether this value can be downcast to `T`.
+ #[cfg(feature = "kv_unstable")]
+ #[deprecated(
+ note = "downcasting has been removed; log an issue at https://github.com/rust-lang/log/issues if this is something you rely on"
+ )]
+ pub fn is<T: 'static>(&self) -> bool {
+ false
+ }
+
+ /// Try downcast this value to `T`.
+ #[cfg(feature = "kv_unstable")]
+ #[deprecated(
+ note = "downcasting has been removed; log an issue at https://github.com/rust-lang/log/issues if this is something you rely on"
+ )]
+ pub fn downcast_ref<T: 'static>(&self) -> Option<&T> {
+ None
+ }
+}
+
+// NOTE: Deprecated; but aliases can't carry this attribute
+#[cfg(feature = "kv_unstable")]
+pub use VisitValue as Visit;
+
+/// Get a value from a type implementing `std::fmt::Debug`.
+#[cfg(feature = "kv_unstable")]
+#[deprecated(note = "use the `key:? = value` macro syntax instead")]
+#[macro_export]
+macro_rules! as_debug {
+ ($capture:expr) => {
+ $crate::kv::Value::from_debug(&$capture)
+ };
+}
+
+/// Get a value from a type implementing `std::fmt::Display`.
+#[cfg(feature = "kv_unstable")]
+#[deprecated(note = "use the `key:% = value` macro syntax instead")]
+#[macro_export]
+macro_rules! as_display {
+ ($capture:expr) => {
+ $crate::kv::Value::from_display(&$capture)
+ };
+}
+
+/// Get a value from an error.
+#[cfg(feature = "kv_unstable_std")]
+#[deprecated(note = "use the `key:err = value` macro syntax instead")]
+#[macro_export]
+macro_rules! as_error {
+ ($capture:expr) => {
+ $crate::kv::Value::from_dyn_error(&$capture)
+ };
+}
+
+#[cfg(feature = "kv_unstable_serde")]
+#[deprecated(note = "use the `key:serde = value` macro syntax instead")]
+/// Get a value from a type implementing `serde::Serialize`.
+#[macro_export]
+macro_rules! as_serde {
+ ($capture:expr) => {
+ $crate::kv::Value::from_serde(&$capture)
+ };
+}
+
+/// Get a value from a type implementing `sval::Value`.
+#[cfg(feature = "kv_unstable_sval")]
+#[deprecated(note = "use the `key:sval = value` macro syntax instead")]
+#[macro_export]
+macro_rules! as_sval {
+ ($capture:expr) => {
+ $crate::kv::Value::from_sval(&$capture)
+ };
+}
+
+#[cfg(test)]
+pub(crate) mod tests {
+ use super::*;
+
+ impl<'v> Value<'v> {
+ pub(crate) fn to_token(&self) -> inner::Token {
+ self.inner.to_test_token()
+ }
+ }
+
+ fn unsigned() -> impl Iterator<Item = Value<'static>> {
+ vec![
+ Value::from(8u8),
+ Value::from(16u16),
+ Value::from(32u32),
+ Value::from(64u64),
+ Value::from(1usize),
+ Value::from(std::num::NonZeroU8::new(8).unwrap()),
+ Value::from(std::num::NonZeroU16::new(16).unwrap()),
+ Value::from(std::num::NonZeroU32::new(32).unwrap()),
+ Value::from(std::num::NonZeroU64::new(64).unwrap()),
+ Value::from(std::num::NonZeroUsize::new(1).unwrap()),
+ ]
+ .into_iter()
+ }
+
+ fn signed() -> impl Iterator<Item = Value<'static>> {
+ vec![
+ Value::from(-8i8),
+ Value::from(-16i16),
+ Value::from(-32i32),
+ Value::from(-64i64),
+ Value::from(-1isize),
+ Value::from(std::num::NonZeroI8::new(-8).unwrap()),
+ Value::from(std::num::NonZeroI16::new(-16).unwrap()),
+ Value::from(std::num::NonZeroI32::new(-32).unwrap()),
+ Value::from(std::num::NonZeroI64::new(-64).unwrap()),
+ Value::from(std::num::NonZeroIsize::new(-1).unwrap()),
+ ]
+ .into_iter()
+ }
+
+ fn float() -> impl Iterator<Item = Value<'static>> {
+ vec![Value::from(32.32f32), Value::from(64.64f64)].into_iter()
+ }
+
+ fn bool() -> impl Iterator<Item = Value<'static>> {
+ vec![Value::from(true), Value::from(false)].into_iter()
+ }
+
+ fn str() -> impl Iterator<Item = Value<'static>> {
+ vec![Value::from("a string"), Value::from("a loong string")].into_iter()
+ }
+
+ fn char() -> impl Iterator<Item = Value<'static>> {
+ vec![Value::from('a'), Value::from('⛰')].into_iter()
+ }
+
+ #[test]
+ fn test_to_value_display() {
+ assert_eq!(42u64.to_value().to_string(), "42");
+ assert_eq!(42i64.to_value().to_string(), "42");
+ assert_eq!(42.01f64.to_value().to_string(), "42.01");
+ assert_eq!(true.to_value().to_string(), "true");
+ assert_eq!('a'.to_value().to_string(), "a");
+ assert_eq!("a loong string".to_value().to_string(), "a loong string");
+ assert_eq!(Some(true).to_value().to_string(), "true");
+ assert_eq!(().to_value().to_string(), "None");
+ assert_eq!(None::<bool>.to_value().to_string(), "None");
+ }
+
+ #[test]
+ fn test_to_value_structured() {
+ assert_eq!(42u64.to_value().to_token(), inner::Token::U64(42));
+ assert_eq!(42i64.to_value().to_token(), inner::Token::I64(42));
+ assert_eq!(42.01f64.to_value().to_token(), inner::Token::F64(42.01));
+ assert_eq!(true.to_value().to_token(), inner::Token::Bool(true));
+ assert_eq!('a'.to_value().to_token(), inner::Token::Char('a'));
+ assert_eq!(
+ "a loong string".to_value().to_token(),
+ inner::Token::Str("a loong string".into())
+ );
+ assert_eq!(Some(true).to_value().to_token(), inner::Token::Bool(true));
+ assert_eq!(().to_value().to_token(), inner::Token::None);
+ assert_eq!(None::<bool>.to_value().to_token(), inner::Token::None);
+ }
+
+ #[test]
+ fn test_to_number() {
+ for v in unsigned() {
+ assert!(v.to_u64().is_some());
+ assert!(v.to_i64().is_some());
+ }
+
+ for v in signed() {
+ assert!(v.to_i64().is_some());
+ }
+
+ for v in unsigned().chain(signed()).chain(float()) {
+ assert!(v.to_f64().is_some());
+ }
+
+ for v in bool().chain(str()).chain(char()) {
+ assert!(v.to_u64().is_none());
+ assert!(v.to_i64().is_none());
+ assert!(v.to_f64().is_none());
+ }
+ }
+
+ #[test]
+ fn test_to_float() {
+ // Only integers from i32::MIN..=u32::MAX can be converted into floats
+ assert!(Value::from(i32::MIN).to_f64().is_some());
+ assert!(Value::from(u32::MAX).to_f64().is_some());
+
+ assert!(Value::from((i32::MIN as i64) - 1).to_f64().is_none());
+ assert!(Value::from((u32::MAX as u64) + 1).to_f64().is_none());
+ }
+
+ #[test]
+ fn test_to_cow_str() {
+ for v in str() {
+ assert!(v.to_borrowed_str().is_some());
+
+ #[cfg(feature = "kv_std")]
+ assert!(v.to_cow_str().is_some());
+ }
+
+ let short_lived = String::from("short lived");
+ let v = Value::from(&*short_lived);
+
+ assert!(v.to_borrowed_str().is_some());
+
+ #[cfg(feature = "kv_std")]
+ assert!(v.to_cow_str().is_some());
+
+ for v in unsigned().chain(signed()).chain(float()).chain(bool()) {
+ assert!(v.to_borrowed_str().is_none());
+
+ #[cfg(feature = "kv_std")]
+ assert!(v.to_cow_str().is_none());
+ }
+ }
+
+ #[test]
+ fn test_to_bool() {
+ for v in bool() {
+ assert!(v.to_bool().is_some());
+ }
+
+ for v in unsigned()
+ .chain(signed())
+ .chain(float())
+ .chain(str())
+ .chain(char())
+ {
+ assert!(v.to_bool().is_none());
+ }
+ }
+
+ #[test]
+ fn test_to_char() {
+ for v in char() {
+ assert!(v.to_char().is_some());
+ }
+
+ for v in unsigned()
+ .chain(signed())
+ .chain(float())
+ .chain(str())
+ .chain(bool())
+ {
+ assert!(v.to_char().is_none());
+ }
+ }
+
+ #[test]
+ fn test_visit_integer() {
+ struct Extract(Option<u64>);
+
+ impl<'v> VisitValue<'v> for Extract {
+ fn visit_any(&mut self, value: Value) -> Result<(), Error> {
+ unimplemented!("unexpected value: {value:?}")
+ }
+
+ fn visit_u64(&mut self, value: u64) -> Result<(), Error> {
+ self.0 = Some(value);
+
+ Ok(())
+ }
+ }
+
+ let mut extract = Extract(None);
+ Value::from(42u64).visit(&mut extract).unwrap();
+
+ assert_eq!(Some(42), extract.0);
+ }
+
+ #[test]
+ fn test_visit_borrowed_str() {
+ struct Extract<'v>(Option<&'v str>);
+
+ impl<'v> VisitValue<'v> for Extract<'v> {
+ fn visit_any(&mut self, value: Value) -> Result<(), Error> {
+ unimplemented!("unexpected value: {value:?}")
+ }
+
+ fn visit_borrowed_str(&mut self, value: &'v str) -> Result<(), Error> {
+ self.0 = Some(value);
+
+ Ok(())
+ }
+ }
+
+ let mut extract = Extract(None);
+
+ let short_lived = String::from("A short-lived string");
+ Value::from(&*short_lived).visit(&mut extract).unwrap();
+
+ assert_eq!(Some("A short-lived string"), extract.0);
+ }
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/lib.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/lib.rs
new file mode 100644
index 0000000..6b43a9a
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/lib.rs
@@ -0,0 +1,1878 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A lightweight logging facade.
+//!
+//! The `log` crate provides a single logging API that abstracts over the
+//! actual logging implementation. Libraries can use the logging API provided
+//! by this crate, and the consumer of those libraries can choose the logging
+//! implementation that is most suitable for its use case.
+//!
+//! If no logging implementation is selected, the facade falls back to a "noop"
+//! implementation that ignores all log messages. The overhead in this case
+//! is very small - just an integer load, comparison and jump.
+//!
+//! A log request consists of a _target_, a _level_, and a _body_. A target is a
+//! string which defaults to the module path of the location of the log request,
+//! though that default may be overridden. Logger implementations typically use
+//! the target to filter requests based on some user configuration.
+//!
+//! # Usage
+//!
+//! The basic use of the log crate is through the five logging macros: [`error!`],
+//! [`warn!`], [`info!`], [`debug!`] and [`trace!`]
+//! where `error!` represents the highest-priority log messages
+//! and `trace!` the lowest. The log messages are filtered by configuring
+//! the log level to exclude messages with a lower priority.
+//! Each of these macros accepts format strings similarly to [`println!`].
+//!
+//!
+//! [`error!`]: ./macro.error.html
+//! [`warn!`]: ./macro.warn.html
+//! [`info!`]: ./macro.info.html
+//! [`debug!`]: ./macro.debug.html
+//! [`trace!`]: ./macro.trace.html
+//! [`println!`]: https://doc.rust-lang.org/stable/std/macro.println.html
+//!
+//! Avoid writing expressions with side-effects in log statements. They may not be evaluated.
+//!
+//! ## In libraries
+//!
+//! Libraries should link only to the `log` crate, and use the provided
+//! macros to log whatever information will be useful to downstream consumers.
+//!
+//! ### Examples
+//!
+//! ```
+//! # #[derive(Debug)] pub struct Yak(String);
+//! # impl Yak { fn shave(&mut self, _: u32) {} }
+//! # fn find_a_razor() -> Result<u32, u32> { Ok(1) }
+//! use log::{info, warn};
+//!
+//! pub fn shave_the_yak(yak: &mut Yak) {
+//! info!(target: "yak_events", "Commencing yak shaving for {yak:?}");
+//!
+//! loop {
+//! match find_a_razor() {
+//! Ok(razor) => {
+//! info!("Razor located: {razor}");
+//! yak.shave(razor);
+//! break;
+//! }
+//! Err(err) => {
+//! warn!("Unable to locate a razor: {err}, retrying");
+//! }
+//! }
+//! }
+//! }
+//! # fn main() {}
+//! ```
+//!
+//! ## In executables
+//!
+//! Executables should choose a logging implementation and initialize it early in the
+//! runtime of the program. Logging implementations will typically include a
+//! function to do this. Any log messages generated before
+//! the implementation is initialized will be ignored.
+//!
+//! The executable itself may use the `log` crate to log as well.
+//!
+//! ### Warning
+//!
+//! The logging system may only be initialized once.
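+//!
+//! For example, a second attempt to install a logger fails (a sketch with a
+//! no-op logger; any `Log` implementation behaves the same way here):
+//!
+//! ```
+//! # use log::Metadata;
+//! # struct SimpleLogger;
+//! # impl log::Log for SimpleLogger {
+//! #     fn enabled(&self, _: &Metadata) -> bool { false }
+//! #     fn log(&self, _: &log::Record) {}
+//! #     fn flush(&self) {}
+//! # }
+//! static LOGGER: SimpleLogger = SimpleLogger;
+//!
+//! assert!(log::set_logger(&LOGGER).is_ok());
+//!
+//! // The second call returns a `SetLoggerError`
+//! assert!(log::set_logger(&LOGGER).is_err());
+//! ```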
+//!
+//! ## Structured logging
+//!
+//! If you enable the `kv` feature you can associate structured values
+//! with your log records. If we take the example from before, we can include
+//! some additional context besides what's in the formatted message:
+//!
+//! ```
+//! # use serde::Serialize;
+//! # #[derive(Debug, Serialize)] pub struct Yak(String);
+//! # impl Yak { fn shave(&mut self, _: u32) {} }
+//! # fn find_a_razor() -> Result<u32, std::io::Error> { Ok(1) }
+//! # #[cfg(feature = "kv_serde")]
+//! # fn main() {
+//! use log::{info, warn};
+//!
+//! pub fn shave_the_yak(yak: &mut Yak) {
+//! info!(target: "yak_events", yak:serde; "Commencing yak shaving");
+//!
+//! loop {
+//! match find_a_razor() {
+//! Ok(razor) => {
+//! info!(razor; "Razor located");
+//! yak.shave(razor);
+//! break;
+//! }
+//! Err(e) => {
+//! warn!(e:err; "Unable to locate a razor, retrying");
+//! }
+//! }
+//! }
+//! }
+//! # }
+//! # #[cfg(not(feature = "kv_serde"))]
+//! # fn main() {}
+//! ```
+//!
+//! See the [`kv`] module documentation for more details.
+//!
+//! # Available logging implementations
+//!
+//! In order to produce log output, executables have to use
+//! a logger implementation compatible with the facade.
+//! There are many available implementations to choose from;
+//! here are some of the most popular ones:
+//!
+//! * Simple minimal loggers:
+//! * [env_logger]
+//! * [colog]
+//! * [simple_logger]
+//! * [simplelog]
+//! * [pretty_env_logger]
+//! * [stderrlog]
+//! * [flexi_logger]
+//! * [call_logger]
+//! * [structured-logger]
+//! * Complex configurable frameworks:
+//! * [log4rs]
+//! * [fern]
+//! * Adaptors for other facilities:
+//! * [syslog]
+//! * [slog-stdlog]
+//! * [systemd-journal-logger]
+//! * [android_log]
+//! * [win_dbg_logger]
+//! * [db_logger]
+//! * [log-to-defmt]
+//! * [logcontrol-log]
+//! * For WebAssembly binaries:
+//! * [console_log]
+//! * For dynamic libraries:
+//! * You may need to construct an FFI-safe wrapper over `log` to initialize it in your libraries
+//! * Utilities:
+//! * [log_err]
+//! * [log-reload]
+//!
+//! # Implementing a Logger
+//!
+//! Loggers implement the [`Log`] trait. Here's a very basic example that simply
+//! logs all messages at the [`Error`][level_link], [`Warn`][level_link] or
+//! [`Info`][level_link] levels to stdout:
+//!
+//! ```
+//! use log::{Record, Level, Metadata};
+//!
+//! struct SimpleLogger;
+//!
+//! impl log::Log for SimpleLogger {
+//! fn enabled(&self, metadata: &Metadata) -> bool {
+//! metadata.level() <= Level::Info
+//! }
+//!
+//! fn log(&self, record: &Record) {
+//! if self.enabled(record.metadata()) {
+//! println!("{} - {}", record.level(), record.args());
+//! }
+//! }
+//!
+//! fn flush(&self) {}
+//! }
+//!
+//! # fn main() {}
+//! ```
+//!
+//! Loggers are installed by calling the [`set_logger`] function. The maximum
+//! log level also needs to be adjusted via the [`set_max_level`] function. The
+//! logging facade uses this as an optimization to improve performance of log
+//! messages at levels that are disabled. It's important to set it, as it
+//! defaults to [`Off`][filter_link], so no log messages will ever be captured!
+//! In the case of our example logger, we'll want to set the maximum log level
+//! to [`Info`][filter_link], since we ignore any [`Debug`][level_link] or
+//! [`Trace`][level_link] level log messages. A logging implementation should
+//! provide a function that wraps a call to [`set_logger`] and
+//! [`set_max_level`], handling initialization of the logger:
+//!
+//! ```
+//! # use log::{Level, Metadata};
+//! # struct SimpleLogger;
+//! # impl log::Log for SimpleLogger {
+//! # fn enabled(&self, _: &Metadata) -> bool { false }
+//! # fn log(&self, _: &log::Record) {}
+//! # fn flush(&self) {}
+//! # }
+//! # fn main() {}
+//! use log::{SetLoggerError, LevelFilter};
+//!
+//! static LOGGER: SimpleLogger = SimpleLogger;
+//!
+//! pub fn init() -> Result<(), SetLoggerError> {
+//! log::set_logger(&LOGGER)
+//! .map(|()| log::set_max_level(LevelFilter::Info))
+//! }
+//! ```
+//!
+//! Implementations that adjust their configurations at runtime should take care
+//! to adjust the maximum log level as well.
+//!
+//! # Use with `std`
+//!
+//! `set_logger` requires you to provide a `&'static Log`, which can be hard to
+//! obtain if your logger depends on some runtime configuration. The
+//! `set_boxed_logger` function is available with the `std` Cargo feature. It is
+//! identical to `set_logger` except that it takes a `Box<Log>` rather than a
+//! `&'static Log`:
+//!
+//! ```
+//! # use log::{Level, LevelFilter, Log, SetLoggerError, Metadata};
+//! # struct SimpleLogger;
+//! # impl log::Log for SimpleLogger {
+//! # fn enabled(&self, _: &Metadata) -> bool { false }
+//! # fn log(&self, _: &log::Record) {}
+//! # fn flush(&self) {}
+//! # }
+//! # fn main() {}
+//! # #[cfg(feature = "std")]
+//! pub fn init() -> Result<(), SetLoggerError> {
+//! log::set_boxed_logger(Box::new(SimpleLogger))
+//! .map(|()| log::set_max_level(LevelFilter::Info))
+//! }
+//! ```
+//!
+//! # Compile time filters
+//!
+//! Log levels can be statically disabled at compile time by enabling one of these Cargo features:
+//!
+//! * `max_level_off`
+//! * `max_level_error`
+//! * `max_level_warn`
+//! * `max_level_info`
+//! * `max_level_debug`
+//! * `max_level_trace`
+//!
+//! Log invocations at disabled levels will be skipped and will not even be present in the
+//! resulting binary. These features control the value of the `STATIC_MAX_LEVEL` constant. The
+//! logging macros check this value before logging a message. By default, no levels are disabled.
+//!
+//! It is possible to override this level for release builds only with the following features:
+//!
+//! * `release_max_level_off`
+//! * `release_max_level_error`
+//! * `release_max_level_warn`
+//! * `release_max_level_info`
+//! * `release_max_level_debug`
+//! * `release_max_level_trace`
+//!
+//! Libraries should avoid using the max level features because they're global and can't be changed
+//! once they're set.
+//!
+//! For example, a crate can disable trace level logs in debug builds and trace, debug, and info
+//! level logs in release builds with the following configuration:
+//!
+//! ```toml
+//! [dependencies]
+//! log = { version = "0.4", features = ["max_level_debug", "release_max_level_warn"] }
+//! ```
+//!
+//! # Crate Feature Flags
+//!
+//! The following crate feature flags are available in addition to the filters. They are
+//! configured in your `Cargo.toml`.
+//!
+//! * `std` allows use of the `std` crate instead of the default `core`. Enables using `std::error` and
+//! `set_boxed_logger` functionality.
+//! * `serde` enables support for serialization and deserialization of `Level` and `LevelFilter`.
+//!
+//! ```toml
+//! [dependencies]
+//! log = { version = "0.4", features = ["std", "serde"] }
+//! ```
+//!
+//! # Version compatibility
+//!
+//! The 0.3 and 0.4 versions of the `log` crate are almost entirely compatible. Log messages
+//! made using `log` 0.3 will forward transparently to a logger implementation using `log` 0.4. Log
+//! messages made using `log` 0.4 will forward to a logger implementation using `log` 0.3, but the
+//! module path and file name information associated with the message will unfortunately be lost.
+//!
+//! [`Log`]: trait.Log.html
+//! [level_link]: enum.Level.html
+//! [filter_link]: enum.LevelFilter.html
+//! [`set_logger`]: fn.set_logger.html
+//! [`set_max_level`]: fn.set_max_level.html
+//! [`try_set_logger_raw`]: fn.try_set_logger_raw.html
+//! [`shutdown_logger_raw`]: fn.shutdown_logger_raw.html
+//! [env_logger]: https://docs.rs/env_logger/*/env_logger/
+//! [colog]: https://docs.rs/colog/*/colog/
+//! [simple_logger]: https://github.com/borntyping/rust-simple_logger
+//! [simplelog]: https://github.com/drakulix/simplelog.rs
+//! [pretty_env_logger]: https://docs.rs/pretty_env_logger/*/pretty_env_logger/
+//! [stderrlog]: https://docs.rs/stderrlog/*/stderrlog/
+//! [flexi_logger]: https://docs.rs/flexi_logger/*/flexi_logger/
+//! [call_logger]: https://docs.rs/call_logger/*/call_logger/
+//! [syslog]: https://docs.rs/syslog/*/syslog/
+//! [slog-stdlog]: https://docs.rs/slog-stdlog/*/slog_stdlog/
+//! [log4rs]: https://docs.rs/log4rs/*/log4rs/
+//! [fern]: https://docs.rs/fern/*/fern/
+//! [systemd-journal-logger]: https://docs.rs/systemd-journal-logger/*/systemd_journal_logger/
+//! [android_log]: https://docs.rs/android_log/*/android_log/
+//! [win_dbg_logger]: https://docs.rs/win_dbg_logger/*/win_dbg_logger/
+//! [db_logger]: https://docs.rs/db_logger/*/db_logger/
+//! [log-to-defmt]: https://docs.rs/log-to-defmt/*/log_to_defmt/
+//! [console_log]: https://docs.rs/console_log/*/console_log/
+//! [structured-logger]: https://docs.rs/structured-logger/latest/structured_logger/
+//! [logcontrol-log]: https://docs.rs/logcontrol-log/*/logcontrol_log/
+//! [log_err]: https://docs.rs/log_err/*/log_err/
+//! [log-reload]: https://docs.rs/log-reload/*/log_reload/
+
+#![doc(
+ html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://www.rust-lang.org/favicon.ico",
+ html_root_url = "https://docs.rs/log/0.4.22"
+)]
+#![warn(missing_docs)]
+#![deny(missing_debug_implementations, unconditional_recursion)]
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+
+#[cfg(any(
+ all(feature = "max_level_off", feature = "max_level_error"),
+ all(feature = "max_level_off", feature = "max_level_warn"),
+ all(feature = "max_level_off", feature = "max_level_info"),
+ all(feature = "max_level_off", feature = "max_level_debug"),
+ all(feature = "max_level_off", feature = "max_level_trace"),
+ all(feature = "max_level_error", feature = "max_level_warn"),
+ all(feature = "max_level_error", feature = "max_level_info"),
+ all(feature = "max_level_error", feature = "max_level_debug"),
+ all(feature = "max_level_error", feature = "max_level_trace"),
+ all(feature = "max_level_warn", feature = "max_level_info"),
+ all(feature = "max_level_warn", feature = "max_level_debug"),
+ all(feature = "max_level_warn", feature = "max_level_trace"),
+ all(feature = "max_level_info", feature = "max_level_debug"),
+ all(feature = "max_level_info", feature = "max_level_trace"),
+ all(feature = "max_level_debug", feature = "max_level_trace"),
+))]
+compile_error!("multiple max_level_* features set");
+
+#[rustfmt::skip]
+#[cfg(any(
+ all(feature = "release_max_level_off", feature = "release_max_level_error"),
+ all(feature = "release_max_level_off", feature = "release_max_level_warn"),
+ all(feature = "release_max_level_off", feature = "release_max_level_info"),
+ all(feature = "release_max_level_off", feature = "release_max_level_debug"),
+ all(feature = "release_max_level_off", feature = "release_max_level_trace"),
+ all(feature = "release_max_level_error", feature = "release_max_level_warn"),
+ all(feature = "release_max_level_error", feature = "release_max_level_info"),
+ all(feature = "release_max_level_error", feature = "release_max_level_debug"),
+ all(feature = "release_max_level_error", feature = "release_max_level_trace"),
+ all(feature = "release_max_level_warn", feature = "release_max_level_info"),
+ all(feature = "release_max_level_warn", feature = "release_max_level_debug"),
+ all(feature = "release_max_level_warn", feature = "release_max_level_trace"),
+ all(feature = "release_max_level_info", feature = "release_max_level_debug"),
+ all(feature = "release_max_level_info", feature = "release_max_level_trace"),
+ all(feature = "release_max_level_debug", feature = "release_max_level_trace"),
+))]
+compile_error!("multiple release_max_level_* features set");
+
+#[cfg(all(not(feature = "std"), not(test)))]
+extern crate core as std;
+
+use std::cfg;
+#[cfg(feature = "std")]
+use std::error;
+use std::str::FromStr;
+use std::{cmp, fmt, mem};
+
+#[macro_use]
+mod macros;
+mod serde;
+
+#[cfg(feature = "kv")]
+pub mod kv;
+
+#[cfg(target_has_atomic = "ptr")]
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+#[cfg(not(target_has_atomic = "ptr"))]
+use std::cell::Cell;
+#[cfg(not(target_has_atomic = "ptr"))]
+use std::sync::atomic::Ordering;
+
+#[cfg(not(target_has_atomic = "ptr"))]
+struct AtomicUsize {
+ v: Cell<usize>,
+}
+
+#[cfg(not(target_has_atomic = "ptr"))]
+impl AtomicUsize {
+ const fn new(v: usize) -> AtomicUsize {
+ AtomicUsize { v: Cell::new(v) }
+ }
+
+ fn load(&self, _order: Ordering) -> usize {
+ self.v.get()
+ }
+
+ fn store(&self, val: usize, _order: Ordering) {
+ self.v.set(val)
+ }
+
+ #[cfg(target_has_atomic = "ptr")]
+ fn compare_exchange(
+ &self,
+ current: usize,
+ new: usize,
+ _success: Ordering,
+ _failure: Ordering,
+ ) -> Result<usize, usize> {
+ let prev = self.v.get();
+ if current == prev {
+ self.v.set(new);
+ }
+ Ok(prev)
+ }
+}
+
+// Any platform without atomics is unlikely to have multiple cores, so
+// writing via Cell will not be a race condition.
+#[cfg(not(target_has_atomic = "ptr"))]
+unsafe impl Sync for AtomicUsize {}
+
+// The LOGGER static holds a pointer to the global logger. It is protected by
+// the STATE static which determines whether LOGGER has been initialized yet.
+static mut LOGGER: &dyn Log = &NopLogger;
+
+static STATE: AtomicUsize = AtomicUsize::new(0);
+
+// There are three different states that we care about: the logger's
+// uninitialized, the logger's initializing (set_logger's been called but
+// LOGGER hasn't actually been set yet), or the logger's active.
+const UNINITIALIZED: usize = 0;
+const INITIALIZING: usize = 1;
+const INITIALIZED: usize = 2;
+
+static MAX_LOG_LEVEL_FILTER: AtomicUsize = AtomicUsize::new(0);
+
+static LOG_LEVEL_NAMES: [&str; 6] = ["OFF", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
+
+static SET_LOGGER_ERROR: &str = "attempted to set a logger after the logging system \
+ was already initialized";
+static LEVEL_PARSE_ERROR: &str =
+ "attempted to convert a string that doesn't match an existing log level";
+
+/// An enum representing the available verbosity levels of the logger.
+///
+/// Typical usage includes: checking if a certain `Level` is enabled with
+/// [`log_enabled!`](macro.log_enabled.html), specifying the `Level` of
+/// [`log!`](macro.log.html), and comparing a `Level` directly to a
+/// [`LevelFilter`](enum.LevelFilter.html).
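+///
+/// As a quick illustration of those uses (a minimal sketch using only the
+/// macros and types documented here):
+///
+/// ```
+/// use log::{log, log_enabled, Level, LevelFilter};
+///
+/// if log_enabled!(Level::Debug) {
+///     log!(Level::Debug, "debug logging is enabled");
+/// }
+/// assert!(Level::Error <= LevelFilter::Warn);
+/// ```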
+#[repr(usize)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub enum Level {
+ /// The "error" level.
+ ///
+ /// Designates very serious errors.
+ // This way these line up with the discriminants for LevelFilter below
+ // This works because Rust treats field-less enums the same way as C does:
+ // https://doc.rust-lang.org/reference/items/enumerations.html#custom-discriminant-values-for-field-less-enumerations
+ Error = 1,
+ /// The "warn" level.
+ ///
+ /// Designates hazardous situations.
+ Warn,
+ /// The "info" level.
+ ///
+ /// Designates useful information.
+ Info,
+ /// The "debug" level.
+ ///
+ /// Designates lower priority information.
+ Debug,
+ /// The "trace" level.
+ ///
+ /// Designates very low priority, often extremely verbose, information.
+ Trace,
+}
+
+impl PartialEq<LevelFilter> for Level {
+ #[inline]
+ fn eq(&self, other: &LevelFilter) -> bool {
+ *self as usize == *other as usize
+ }
+}
+
+impl PartialOrd<LevelFilter> for Level {
+ #[inline]
+ fn partial_cmp(&self, other: &LevelFilter) -> Option<cmp::Ordering> {
+ Some((*self as usize).cmp(&(*other as usize)))
+ }
+}
+
+impl FromStr for Level {
+ type Err = ParseLevelError;
+ fn from_str(level: &str) -> Result<Level, Self::Err> {
+ LOG_LEVEL_NAMES
+ .iter()
+ .position(|&name| name.eq_ignore_ascii_case(level))
+ .into_iter()
+ .filter(|&idx| idx != 0)
+ .map(|idx| Level::from_usize(idx).unwrap())
+ .next()
+ .ok_or(ParseLevelError(()))
+ }
+}
+
+impl fmt::Display for Level {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.pad(self.as_str())
+ }
+}
+
+impl Level {
+ fn from_usize(u: usize) -> Option<Level> {
+ match u {
+ 1 => Some(Level::Error),
+ 2 => Some(Level::Warn),
+ 3 => Some(Level::Info),
+ 4 => Some(Level::Debug),
+ 5 => Some(Level::Trace),
+ _ => None,
+ }
+ }
+
+ /// Returns the most verbose logging level.
+ #[inline]
+ pub fn max() -> Level {
+ Level::Trace
+ }
+
+ /// Converts the `Level` to the equivalent `LevelFilter`.
+ #[inline]
+ pub fn to_level_filter(&self) -> LevelFilter {
+ LevelFilter::from_usize(*self as usize).unwrap()
+ }
+
+ /// Returns the string representation of the `Level`.
+ ///
+ /// This returns the same string as the `fmt::Display` implementation.
+ pub fn as_str(&self) -> &'static str {
+ LOG_LEVEL_NAMES[*self as usize]
+ }
+
+ /// Iterate through all supported logging levels.
+ ///
+ /// The order of iteration is from more severe to less severe log messages.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use log::Level;
+ ///
+ /// let mut levels = Level::iter();
+ ///
+ /// assert_eq!(Some(Level::Error), levels.next());
+ /// assert_eq!(Some(Level::Trace), levels.last());
+ /// ```
+ pub fn iter() -> impl Iterator<Item = Self> {
+ (1..6).map(|i| Self::from_usize(i).unwrap())
+ }
+}
+
+/// An enum representing the available verbosity level filters of the logger.
+///
+/// A `LevelFilter` may be compared directly to a [`Level`]. Use this type
+/// to get and set the maximum log level with [`max_level()`] and [`set_max_level`].
+///
+/// [`Level`]: enum.Level.html
+/// [`max_level()`]: fn.max_level.html
+/// [`set_max_level`]: fn.set_max_level.html
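+///
+/// # Examples
+///
+/// A minimal sketch of getting and setting the maximum level:
+///
+/// ```
+/// use log::LevelFilter;
+///
+/// log::set_max_level(LevelFilter::Info);
+/// assert_eq!(log::max_level(), LevelFilter::Info);
+/// ```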
+#[repr(usize)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub enum LevelFilter {
+ /// A level lower than all log levels.
+ Off,
+ /// Corresponds to the `Error` log level.
+ Error,
+ /// Corresponds to the `Warn` log level.
+ Warn,
+ /// Corresponds to the `Info` log level.
+ Info,
+ /// Corresponds to the `Debug` log level.
+ Debug,
+ /// Corresponds to the `Trace` log level.
+ Trace,
+}
+
+impl PartialEq<Level> for LevelFilter {
+ #[inline]
+ fn eq(&self, other: &Level) -> bool {
+ other.eq(self)
+ }
+}
+
+impl PartialOrd<Level> for LevelFilter {
+ #[inline]
+ fn partial_cmp(&self, other: &Level) -> Option<cmp::Ordering> {
+ Some((*self as usize).cmp(&(*other as usize)))
+ }
+}
+
+impl FromStr for LevelFilter {
+ type Err = ParseLevelError;
+ fn from_str(level: &str) -> Result<LevelFilter, Self::Err> {
+ LOG_LEVEL_NAMES
+ .iter()
+ .position(|&name| name.eq_ignore_ascii_case(level))
+ .map(|p| LevelFilter::from_usize(p).unwrap())
+ .ok_or(ParseLevelError(()))
+ }
+}
+
+impl fmt::Display for LevelFilter {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.pad(self.as_str())
+ }
+}
+
+impl LevelFilter {
+ fn from_usize(u: usize) -> Option<LevelFilter> {
+ match u {
+ 0 => Some(LevelFilter::Off),
+ 1 => Some(LevelFilter::Error),
+ 2 => Some(LevelFilter::Warn),
+ 3 => Some(LevelFilter::Info),
+ 4 => Some(LevelFilter::Debug),
+ 5 => Some(LevelFilter::Trace),
+ _ => None,
+ }
+ }
+
+ /// Returns the most verbose logging level filter.
+ #[inline]
+ pub fn max() -> LevelFilter {
+ LevelFilter::Trace
+ }
+
+ /// Converts `self` to the equivalent `Level`.
+ ///
+ /// Returns `None` if `self` is `LevelFilter::Off`.
+ #[inline]
+ pub fn to_level(&self) -> Option<Level> {
+ Level::from_usize(*self as usize)
+ }
+
+ /// Returns the string representation of the `LevelFilter`.
+ ///
+ /// This returns the same string as the `fmt::Display` implementation.
+ pub fn as_str(&self) -> &'static str {
+ LOG_LEVEL_NAMES[*self as usize]
+ }
+
+ /// Iterate through all supported filtering levels.
+ ///
+ /// The order of iteration is from less to more verbose filtering.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use log::LevelFilter;
+ ///
+ /// let mut levels = LevelFilter::iter();
+ ///
+ /// assert_eq!(Some(LevelFilter::Off), levels.next());
+ /// assert_eq!(Some(LevelFilter::Trace), levels.last());
+ /// ```
+ pub fn iter() -> impl Iterator<Item = Self> {
+ (0..6).map(|i| Self::from_usize(i).unwrap())
+ }
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
+enum MaybeStaticStr<'a> {
+ Static(&'static str),
+ Borrowed(&'a str),
+}
+
+impl<'a> MaybeStaticStr<'a> {
+ #[inline]
+ fn get(&self) -> &'a str {
+ match *self {
+ MaybeStaticStr::Static(s) => s,
+ MaybeStaticStr::Borrowed(s) => s,
+ }
+ }
+}
+
+/// The "payload" of a log message.
+///
+/// # Use
+///
+/// `Record` structures are passed as parameters to the [`log`][method.log]
+/// method of the [`Log`] trait. Logger implementors manipulate these
+/// structures in order to display log messages. `Record`s are automatically
+/// created by the [`log!`] macro and so are not seen by log users.
+///
+/// Note that the [`level()`] and [`target()`] accessors are equivalent to
+/// `self.metadata().level()` and `self.metadata().target()` respectively.
+/// These methods are provided as a convenience for users of this structure.
+///
+/// # Example
+///
+/// The following example shows a simple logger that displays the level,
+/// module path, and message of any `Record` that is passed to it.
+///
+/// ```
+/// struct SimpleLogger;
+///
+/// impl log::Log for SimpleLogger {
+/// fn enabled(&self, _metadata: &log::Metadata) -> bool {
+/// true
+/// }
+///
+/// fn log(&self, record: &log::Record) {
+/// if !self.enabled(record.metadata()) {
+/// return;
+/// }
+///
+/// println!("{}:{} -- {}",
+/// record.level(),
+/// record.target(),
+/// record.args());
+/// }
+/// fn flush(&self) {}
+/// }
+/// ```
+///
+/// [method.log]: trait.Log.html#tymethod.log
+/// [`Log`]: trait.Log.html
+/// [`log!`]: macro.log.html
+/// [`level()`]: struct.Record.html#method.level
+/// [`target()`]: struct.Record.html#method.target
+#[derive(Clone, Debug)]
+pub struct Record<'a> {
+ metadata: Metadata<'a>,
+ args: fmt::Arguments<'a>,
+ module_path: Option<MaybeStaticStr<'a>>,
+ file: Option<MaybeStaticStr<'a>>,
+ line: Option<u32>,
+ #[cfg(feature = "kv")]
+ key_values: KeyValues<'a>,
+}
+
+// This wrapper type is only needed so we can
+// `#[derive(Debug)]` on `Record`. It also
+// provides a useful `Debug` implementation for
+// the underlying `Source`.
+#[cfg(feature = "kv")]
+#[derive(Clone)]
+struct KeyValues<'a>(&'a dyn kv::Source);
+
+#[cfg(feature = "kv")]
+impl<'a> fmt::Debug for KeyValues<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut visitor = f.debug_map();
+ self.0.visit(&mut visitor).map_err(|_| fmt::Error)?;
+ visitor.finish()
+ }
+}
+
+impl<'a> Record<'a> {
+ /// Returns a new builder.
+ #[inline]
+ pub fn builder() -> RecordBuilder<'a> {
+ RecordBuilder::new()
+ }
+
+ /// The message body.
+ #[inline]
+ pub fn args(&self) -> &fmt::Arguments<'a> {
+ &self.args
+ }
+
+ /// Metadata about the log directive.
+ #[inline]
+ pub fn metadata(&self) -> &Metadata<'a> {
+ &self.metadata
+ }
+
+ /// The verbosity level of the message.
+ #[inline]
+ pub fn level(&self) -> Level {
+ self.metadata.level()
+ }
+
+ /// The name of the target of the directive.
+ #[inline]
+ pub fn target(&self) -> &'a str {
+ self.metadata.target()
+ }
+
+ /// The module path of the message.
+ #[inline]
+ pub fn module_path(&self) -> Option<&'a str> {
+ self.module_path.map(|s| s.get())
+ }
+
+ /// The module path of the message, if it is a `'static` string.
+ #[inline]
+ pub fn module_path_static(&self) -> Option<&'static str> {
+ match self.module_path {
+ Some(MaybeStaticStr::Static(s)) => Some(s),
+ _ => None,
+ }
+ }
+
+ /// The source file containing the message.
+ #[inline]
+ pub fn file(&self) -> Option<&'a str> {
+ self.file.map(|s| s.get())
+ }
+
+ /// The source file containing the message, if it is a `'static` string.
+ #[inline]
+ pub fn file_static(&self) -> Option<&'static str> {
+ match self.file {
+ Some(MaybeStaticStr::Static(s)) => Some(s),
+ _ => None,
+ }
+ }
+
+ /// The line containing the message.
+ #[inline]
+ pub fn line(&self) -> Option<u32> {
+ self.line
+ }
+
+ /// The structured key-value pairs associated with the message.
+ #[cfg(feature = "kv")]
+ #[inline]
+ pub fn key_values(&self) -> &dyn kv::Source {
+ self.key_values.0
+ }
+
+ /// Create a new [`RecordBuilder`](struct.RecordBuilder.html) based on this record.
+ #[cfg(feature = "kv")]
+ #[inline]
+ pub fn to_builder(&self) -> RecordBuilder {
+ RecordBuilder {
+ record: Record {
+ metadata: Metadata {
+ level: self.metadata.level,
+ target: self.metadata.target,
+ },
+ args: self.args,
+ module_path: self.module_path,
+ file: self.file,
+ line: self.line,
+ key_values: self.key_values.clone(),
+ },
+ }
+ }
+}
+
+/// Builder for [`Record`](struct.Record.html).
+///
+/// Typically this should only be used by log library creators or for testing and "shim loggers".
+/// The `RecordBuilder` can set the different parameters of a `Record` object, and returns
+/// the created object when `build` is called.
+///
+/// # Examples
+///
+/// ```
+/// use log::{Level, Record};
+///
+/// let record = Record::builder()
+/// .args(format_args!("Error!"))
+/// .level(Level::Error)
+/// .target("myApp")
+/// .file(Some("server.rs"))
+/// .line(Some(144))
+/// .module_path(Some("server"))
+/// .build();
+/// ```
+///
+/// Alternatively, use [`MetadataBuilder`](struct.MetadataBuilder.html):
+///
+/// ```
+/// use log::{Record, Level, MetadataBuilder};
+///
+/// let error_metadata = MetadataBuilder::new()
+/// .target("myApp")
+/// .level(Level::Error)
+/// .build();
+///
+/// let record = Record::builder()
+/// .metadata(error_metadata)
+/// .args(format_args!("Error!"))
+/// .line(Some(433))
+/// .file(Some("app.rs"))
+/// .module_path(Some("server"))
+/// .build();
+/// ```
+#[derive(Debug)]
+pub struct RecordBuilder<'a> {
+ record: Record<'a>,
+}
+
+impl<'a> RecordBuilder<'a> {
+ /// Construct a new `RecordBuilder`.
+ ///
+ /// The default options are:
+ ///
+ /// - `args`: [`format_args!("")`]
+ /// - `metadata`: [`Metadata::builder().build()`]
+ /// - `module_path`: `None`
+ /// - `file`: `None`
+ /// - `line`: `None`
+ ///
+ /// [`format_args!("")`]: https://doc.rust-lang.org/std/macro.format_args.html
+ /// [`Metadata::builder().build()`]: struct.MetadataBuilder.html#method.build
+ #[inline]
+ pub fn new() -> RecordBuilder<'a> {
+ RecordBuilder {
+ record: Record {
+ args: format_args!(""),
+ metadata: Metadata::builder().build(),
+ module_path: None,
+ file: None,
+ line: None,
+ #[cfg(feature = "kv")]
+ key_values: KeyValues(&None::<(kv::Key, kv::Value)>),
+ },
+ }
+ }
+
+ /// Set [`args`](struct.Record.html#method.args).
+ #[inline]
+ pub fn args(&mut self, args: fmt::Arguments<'a>) -> &mut RecordBuilder<'a> {
+ self.record.args = args;
+ self
+ }
+
+ /// Set [`metadata`](struct.Record.html#method.metadata). Construct a `Metadata` object with [`MetadataBuilder`](struct.MetadataBuilder.html).
+ #[inline]
+ pub fn metadata(&mut self, metadata: Metadata<'a>) -> &mut RecordBuilder<'a> {
+ self.record.metadata = metadata;
+ self
+ }
+
+ /// Set [`Metadata::level`](struct.Metadata.html#method.level).
+ #[inline]
+ pub fn level(&mut self, level: Level) -> &mut RecordBuilder<'a> {
+ self.record.metadata.level = level;
+ self
+ }
+
+ /// Set [`Metadata::target`](struct.Metadata.html#method.target)
+ #[inline]
+ pub fn target(&mut self, target: &'a str) -> &mut RecordBuilder<'a> {
+ self.record.metadata.target = target;
+ self
+ }
+
+ /// Set [`module_path`](struct.Record.html#method.module_path)
+ #[inline]
+ pub fn module_path(&mut self, path: Option<&'a str>) -> &mut RecordBuilder<'a> {
+ self.record.module_path = path.map(MaybeStaticStr::Borrowed);
+ self
+ }
+
+ /// Set [`module_path`](struct.Record.html#method.module_path) to a `'static` string
+ #[inline]
+ pub fn module_path_static(&mut self, path: Option<&'static str>) -> &mut RecordBuilder<'a> {
+ self.record.module_path = path.map(MaybeStaticStr::Static);
+ self
+ }
+
+ /// Set [`file`](struct.Record.html#method.file)
+ #[inline]
+ pub fn file(&mut self, file: Option<&'a str>) -> &mut RecordBuilder<'a> {
+ self.record.file = file.map(MaybeStaticStr::Borrowed);
+ self
+ }
+
+ /// Set [`file`](struct.Record.html#method.file) to a `'static` string.
+ #[inline]
+ pub fn file_static(&mut self, file: Option<&'static str>) -> &mut RecordBuilder<'a> {
+ self.record.file = file.map(MaybeStaticStr::Static);
+ self
+ }
+
+ /// Set [`line`](struct.Record.html#method.line)
+ #[inline]
+ pub fn line(&mut self, line: Option<u32>) -> &mut RecordBuilder<'a> {
+ self.record.line = line;
+ self
+ }
+
+ /// Set [`key_values`](struct.Record.html#method.key_values)
+ #[cfg(feature = "kv")]
+ #[inline]
+ pub fn key_values(&mut self, kvs: &'a dyn kv::Source) -> &mut RecordBuilder<'a> {
+ self.record.key_values = KeyValues(kvs);
+ self
+ }
+
+ /// Invoke the builder and return a `Record`
+ #[inline]
+ pub fn build(&self) -> Record<'a> {
+ self.record.clone()
+ }
+}
+
+impl<'a> Default for RecordBuilder<'a> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+/// Metadata about a log message.
+///
+/// # Use
+///
+/// `Metadata` structs are created when users of the library use
+/// logging macros.
+///
+/// They are consumed by implementations of the `Log` trait in the
+/// `enabled` method.
+///
+/// `Record`s use `Metadata` to determine the log message's severity
+/// and target.
+///
+/// Users should use the `log_enabled!` macro in their code to avoid
+/// constructing expensive log messages.
+///
+/// # Examples
+///
+/// ```
+/// use log::{Record, Level, Metadata};
+///
+/// struct MyLogger;
+///
+/// impl log::Log for MyLogger {
+/// fn enabled(&self, metadata: &Metadata) -> bool {
+/// metadata.level() <= Level::Info
+/// }
+///
+/// fn log(&self, record: &Record) {
+/// if self.enabled(record.metadata()) {
+/// println!("{} - {}", record.level(), record.args());
+/// }
+/// }
+/// fn flush(&self) {}
+/// }
+///
+/// # fn main(){}
+/// ```
+#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
+pub struct Metadata<'a> {
+ level: Level,
+ target: &'a str,
+}
+
+impl<'a> Metadata<'a> {
+ /// Returns a new builder.
+ #[inline]
+ pub fn builder() -> MetadataBuilder<'a> {
+ MetadataBuilder::new()
+ }
+
+ /// The verbosity level of the message.
+ #[inline]
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// The name of the target of the directive.
+ #[inline]
+ pub fn target(&self) -> &'a str {
+ self.target
+ }
+}
+
+/// Builder for [`Metadata`](struct.Metadata.html).
+///
+/// Typically this should only be used by log library creators or for testing and "shim loggers".
+/// The `MetadataBuilder` can set the different parameters of a `Metadata` object, and returns
+/// the created object when `build` is called.
+///
+/// # Example
+///
+/// ```
+/// let target = "myApp";
+/// use log::{Level, MetadataBuilder};
+/// let metadata = MetadataBuilder::new()
+/// .level(Level::Debug)
+/// .target(target)
+/// .build();
+/// ```
+#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
+pub struct MetadataBuilder<'a> {
+ metadata: Metadata<'a>,
+}
+
+impl<'a> MetadataBuilder<'a> {
+ /// Construct a new `MetadataBuilder`.
+ ///
+ /// The default options are:
+ ///
+ /// - `level`: `Level::Info`
+ /// - `target`: `""`
+ #[inline]
+ pub fn new() -> MetadataBuilder<'a> {
+ MetadataBuilder {
+ metadata: Metadata {
+ level: Level::Info,
+ target: "",
+ },
+ }
+ }
+
+ /// Setter for [`level`](struct.Metadata.html#method.level).
+ #[inline]
+ pub fn level(&mut self, arg: Level) -> &mut MetadataBuilder<'a> {
+ self.metadata.level = arg;
+ self
+ }
+
+ /// Setter for [`target`](struct.Metadata.html#method.target).
+ #[inline]
+ pub fn target(&mut self, target: &'a str) -> &mut MetadataBuilder<'a> {
+ self.metadata.target = target;
+ self
+ }
+
+ /// Returns a `Metadata` object.
+ #[inline]
+ pub fn build(&self) -> Metadata<'a> {
+ self.metadata.clone()
+ }
+}
+
+impl<'a> Default for MetadataBuilder<'a> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+/// A trait encapsulating the operations required of a logger.
+pub trait Log: Sync + Send {
+ /// Determines if a log message with the specified metadata would be
+ /// logged.
+ ///
+ /// This is used by the `log_enabled!` macro to allow callers to avoid
+ /// expensive computation of log message arguments if the message would be
+ /// discarded anyway.
+ ///
+ /// # For implementors
+ ///
+ /// This method isn't called automatically by the `log!` macros.
+ /// It's up to an implementation of the `Log` trait to call `enabled` in its own
+ /// `log` method implementation to guarantee that filtering is applied.
+ fn enabled(&self, metadata: &Metadata) -> bool;
+
+ /// Logs the `Record`.
+ ///
+ /// # For implementors
+ ///
+ /// Note that `enabled` is *not* necessarily called before this method.
+ /// Implementations of `log` should perform all necessary filtering
+ /// internally.
+ fn log(&self, record: &Record);
+
+ /// Flushes any buffered records.
+ ///
+ /// # For implementors
+ ///
+ /// This method isn't called automatically by the `log!` macros.
+ /// It can be called manually on shut-down to ensure any in-flight records are flushed.
+ fn flush(&self);
+}
+
+// Just used as a dummy initial value for LOGGER
+struct NopLogger;
+
+impl Log for NopLogger {
+ fn enabled(&self, _: &Metadata) -> bool {
+ false
+ }
+
+ fn log(&self, _: &Record) {}
+ fn flush(&self) {}
+}
+
+impl<T> Log for &'_ T
+where
+ T: ?Sized + Log,
+{
+ fn enabled(&self, metadata: &Metadata) -> bool {
+ (**self).enabled(metadata)
+ }
+
+ fn log(&self, record: &Record) {
+ (**self).log(record);
+ }
+ fn flush(&self) {
+ (**self).flush();
+ }
+}
+
+#[cfg(feature = "std")]
+impl<T> Log for std::boxed::Box<T>
+where
+ T: ?Sized + Log,
+{
+ fn enabled(&self, metadata: &Metadata) -> bool {
+ self.as_ref().enabled(metadata)
+ }
+
+ fn log(&self, record: &Record) {
+ self.as_ref().log(record);
+ }
+ fn flush(&self) {
+ self.as_ref().flush();
+ }
+}
+
+#[cfg(feature = "std")]
+impl<T> Log for std::sync::Arc<T>
+where
+ T: ?Sized + Log,
+{
+ fn enabled(&self, metadata: &Metadata) -> bool {
+ self.as_ref().enabled(metadata)
+ }
+
+ fn log(&self, record: &Record) {
+ self.as_ref().log(record);
+ }
+ fn flush(&self) {
+ self.as_ref().flush();
+ }
+}
+
+/// Sets the global maximum log level.
+///
+/// Generally, this should only be called by the active logging implementation.
+///
+/// Note that `Trace` is the maximum level, because it provides the maximum amount of detail in the emitted logs.
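+///
+/// # Examples
+///
+/// A minimal sketch; after this call, messages at `Warn` severity or higher pass
+/// the level check and more verbose ones are discarded by the macros:
+///
+/// ```
+/// use log::LevelFilter;
+///
+/// log::set_max_level(LevelFilter::Warn);
+/// assert!(log::max_level() < log::Level::Info);
+/// ```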
+#[inline]
+#[cfg(target_has_atomic = "ptr")]
+pub fn set_max_level(level: LevelFilter) {
+ MAX_LOG_LEVEL_FILTER.store(level as usize, Ordering::Relaxed);
+}
+
+/// A thread-unsafe version of [`set_max_level`].
+///
+/// This function is available on all platforms, even those that do not have
+/// support for atomics that is needed by [`set_max_level`].
+///
+/// In almost all cases, [`set_max_level`] should be preferred.
+///
+/// # Safety
+///
+/// This function is only safe to call when it cannot race with any other
+/// calls to `set_max_level` or `set_max_level_racy`.
+///
+/// This can be upheld by (for example) making sure that **there are no other
+/// threads**, and (on embedded) that **interrupts are disabled**.
+///
+/// It is safe to use all other logging functions while this function runs
+/// (including all logging macros).
+///
+/// [`set_max_level`]: fn.set_max_level.html
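+///
+/// # Examples
+///
+/// A sketch for a context where the safety requirement holds, for example a
+/// single-threaded embedded target with interrupts disabled:
+///
+/// ```
+/// use log::LevelFilter;
+///
+/// // Safety: assumed single-threaded context, so no call can race with this one.
+/// unsafe { log::set_max_level_racy(LevelFilter::Info) };
+/// assert_eq!(log::max_level(), LevelFilter::Info);
+/// ```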
+#[inline]
+pub unsafe fn set_max_level_racy(level: LevelFilter) {
+ // `MAX_LOG_LEVEL_FILTER` uses a `Cell` as the underlying primitive when a
+ // platform doesn't support `target_has_atomic = "ptr"`, so even though this looks the same
+ // as `set_max_level` it may have different safety properties.
+ MAX_LOG_LEVEL_FILTER.store(level as usize, Ordering::Relaxed);
+}
+
+/// Returns the current maximum log level.
+///
+/// The [`log!`], [`error!`], [`warn!`], [`info!`], [`debug!`], and [`trace!`] macros check
+/// this value and discard any message logged at a higher level. The maximum
+/// log level is set by the [`set_max_level`] function.
+///
+/// [`log!`]: macro.log.html
+/// [`error!`]: macro.error.html
+/// [`warn!`]: macro.warn.html
+/// [`info!`]: macro.info.html
+/// [`debug!`]: macro.debug.html
+/// [`trace!`]: macro.trace.html
+/// [`set_max_level`]: fn.set_max_level.html
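+///
+/// # Examples
+///
+/// A sketch of the comparison the macros perform against this value:
+///
+/// ```
+/// use log::{Level, LevelFilter};
+///
+/// log::set_max_level(LevelFilter::Error);
+/// // Under this setting the macros discard anything more verbose than `Error`.
+/// assert!(Level::Warn > log::max_level());
+/// ```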
+#[inline(always)]
+pub fn max_level() -> LevelFilter {
+ // Since `LevelFilter` is `repr(usize)`,
+ // this transmute is sound if and only if `MAX_LOG_LEVEL_FILTER`
+ // is set to a usize that is a valid discriminant for `LevelFilter`.
+ // Since `MAX_LOG_LEVEL_FILTER` is private, the only time it's set
+ // is by `set_max_level` above, i.e. by casting a `LevelFilter` to `usize`.
+ // So any usize stored in `MAX_LOG_LEVEL_FILTER` is a valid discriminant.
+ unsafe { mem::transmute(MAX_LOG_LEVEL_FILTER.load(Ordering::Relaxed)) }
+}
+
+/// Sets the global logger to a `Box<Log>`.
+///
+/// This is a simple convenience wrapper over `set_logger`; it takes a
+/// `Box<Log>` rather than a `&'static Log`. See the documentation for
+/// [`set_logger`] for more details.
+///
+/// Requires the `std` feature.
+///
+/// # Errors
+///
+/// An error is returned if a logger has already been set.
+///
+/// [`set_logger`]: fn.set_logger.html
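+///
+/// # Examples
+///
+/// A minimal sketch; the `BoxedLogger` type here is purely illustrative:
+///
+/// ```
+/// use log::{LevelFilter, Log, Metadata, Record};
+///
+/// struct BoxedLogger;
+///
+/// impl Log for BoxedLogger {
+///     fn enabled(&self, _metadata: &Metadata) -> bool {
+///         true
+///     }
+///
+///     fn log(&self, record: &Record) {
+///         println!("{} - {}", record.level(), record.args());
+///     }
+///
+///     fn flush(&self) {}
+/// }
+///
+/// log::set_boxed_logger(Box::new(BoxedLogger)).unwrap();
+/// log::set_max_level(LevelFilter::Info);
+/// ```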
+#[cfg(all(feature = "std", target_has_atomic = "ptr"))]
+pub fn set_boxed_logger(logger: Box<dyn Log>) -> Result<(), SetLoggerError> {
+ set_logger_inner(|| Box::leak(logger))
+}
+
+/// Sets the global logger to a `&'static Log`.
+///
+/// This function may only be called once in the lifetime of a program. Any log
+/// events that occur before the call to `set_logger` completes will be ignored.
+///
+/// This function does not typically need to be called manually. Logger
+/// implementations should provide an initialization method that installs the
+/// logger internally.
+///
+/// # Availability
+///
+/// This method is available even when the `std` feature is disabled. However,
+/// it is currently unavailable on `thumbv6` targets, which lack support for
+/// some of the atomic operations used by this function. Even on those
+/// targets, [`set_logger_racy`] will be available.
+///
+/// # Errors
+///
+/// An error is returned if a logger has already been set.
+///
+/// # Examples
+///
+/// ```
+/// use log::{error, info, warn, Record, Level, Metadata, LevelFilter};
+///
+/// static MY_LOGGER: MyLogger = MyLogger;
+///
+/// struct MyLogger;
+///
+/// impl log::Log for MyLogger {
+/// fn enabled(&self, metadata: &Metadata) -> bool {
+/// metadata.level() <= Level::Info
+/// }
+///
+/// fn log(&self, record: &Record) {
+/// if self.enabled(record.metadata()) {
+/// println!("{} - {}", record.level(), record.args());
+/// }
+/// }
+/// fn flush(&self) {}
+/// }
+///
+/// # fn main(){
+/// log::set_logger(&MY_LOGGER).unwrap();
+/// log::set_max_level(LevelFilter::Info);
+///
+/// info!("hello log");
+/// warn!("warning");
+/// error!("oops");
+/// # }
+/// ```
+///
+/// [`set_logger_racy`]: fn.set_logger_racy.html
+#[cfg(target_has_atomic = "ptr")]
+pub fn set_logger(logger: &'static dyn Log) -> Result<(), SetLoggerError> {
+ set_logger_inner(|| logger)
+}
+
+#[cfg(target_has_atomic = "ptr")]
+fn set_logger_inner<F>(make_logger: F) -> Result<(), SetLoggerError>
+where
+ F: FnOnce() -> &'static dyn Log,
+{
+ match STATE.compare_exchange(
+ UNINITIALIZED,
+ INITIALIZING,
+ Ordering::Acquire,
+ Ordering::Relaxed,
+ ) {
+ Ok(UNINITIALIZED) => {
+ unsafe {
+ LOGGER = make_logger();
+ }
+ STATE.store(INITIALIZED, Ordering::Release);
+ Ok(())
+ }
+ Err(INITIALIZING) => {
+ while STATE.load(Ordering::Relaxed) == INITIALIZING {
+ std::hint::spin_loop();
+ }
+ Err(SetLoggerError(()))
+ }
+ _ => Err(SetLoggerError(())),
+ }
+}
+
+/// A thread-unsafe version of [`set_logger`].
+///
+/// This function is available on all platforms, even those that do not have
+/// support for atomics that is needed by [`set_logger`].
+///
+/// In almost all cases, [`set_logger`] should be preferred.
+///
+/// # Safety
+///
+/// This function is only safe to call when it cannot race with any other
+/// calls to `set_logger` or `set_logger_racy`.
+///
+/// This can be upheld by (for example) making sure that **there are no other
+/// threads**, and (on embedded) that **interrupts are disabled**.
+///
+/// It is safe to use other logging functions while this function runs
+/// (including all logging macros).
+///
+/// [`set_logger`]: fn.set_logger.html
+pub unsafe fn set_logger_racy(logger: &'static dyn Log) -> Result<(), SetLoggerError> {
+ match STATE.load(Ordering::Acquire) {
+ UNINITIALIZED => {
+ LOGGER = logger;
+ STATE.store(INITIALIZED, Ordering::Release);
+ Ok(())
+ }
+ INITIALIZING => {
+ // This is just plain UB, since we were racing another initialization function
+ unreachable!("set_logger_racy must not be used with other initialization functions")
+ }
+ _ => Err(SetLoggerError(())),
+ }
+}
+
+/// The type returned by [`set_logger`] if [`set_logger`] has already been called.
+///
+/// [`set_logger`]: fn.set_logger.html
+#[allow(missing_copy_implementations)]
+#[derive(Debug)]
+pub struct SetLoggerError(());
+
+impl fmt::Display for SetLoggerError {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.write_str(SET_LOGGER_ERROR)
+ }
+}
+
+// The Error trait is not available in libcore
+#[cfg(feature = "std")]
+impl error::Error for SetLoggerError {}
+
+/// The type returned by [`from_str`] when the string doesn't match any of the log levels.
+///
+/// [`from_str`]: https://doc.rust-lang.org/std/str/trait.FromStr.html#tymethod.from_str
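+///
+/// # Examples
+///
+/// A string that matches no level yields this error, while any level name
+/// (case-insensitive) parses successfully:
+///
+/// ```
+/// use log::Level;
+///
+/// assert!("verbose".parse::<Level>().is_err());
+/// assert!("info".parse::<Level>().is_ok());
+/// ```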
+#[allow(missing_copy_implementations)]
+#[derive(Debug, PartialEq, Eq)]
+pub struct ParseLevelError(());
+
+impl fmt::Display for ParseLevelError {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.write_str(LEVEL_PARSE_ERROR)
+ }
+}
+
+// The Error trait is not available in libcore
+#[cfg(feature = "std")]
+impl error::Error for ParseLevelError {}
+
+/// Returns a reference to the logger.
+///
+/// If a logger has not been set, a no-op implementation is returned.
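+///
+/// # Examples
+///
+/// A sketch of how an application might flush the active logger on shutdown
+/// (the call is a no-op if no logger was installed):
+///
+/// ```
+/// use log::Log;
+///
+/// log::logger().flush();
+/// ```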
+pub fn logger() -> &'static dyn Log {
+ // The `Acquire` memory ordering guarantees that the current thread sees any
+ // memory writes that happened before the value was stored
+ // into `STATE` with `Release` (or stronger) ordering.
+ //
+ // Since the value `INITIALIZED` is written only after `LOGGER` has been
+ // initialized, observing it through this `Acquire` load makes both the
+ // write to the `LOGGER` static and the initialization of the logger's
+ // internal state visible to (synchronized with) the current thread.
+ if STATE.load(Ordering::Acquire) != INITIALIZED {
+ static NOP: NopLogger = NopLogger;
+ &NOP
+ } else {
+ unsafe { LOGGER }
+ }
+}
+
+// WARNING: this is not part of the crate's public API and is subject to change at any time
+#[doc(hidden)]
+pub mod __private_api;
+
+/// The statically resolved maximum log level.
+///
+/// See the crate level documentation for information on how to configure this.
+///
+/// This value is checked by the log macros, but not by the `Log`ger returned by
+/// the [`logger`] function. Code that manually calls functions on that value
+/// should compare the level against this value.
+///
+/// [`logger`]: fn.logger.html
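+///
+/// A sketch of the manual comparison described above:
+///
+/// ```
+/// use log::{Level, Log, STATIC_MAX_LEVEL};
+///
+/// let level = Level::Debug;
+/// if level <= STATIC_MAX_LEVEL && level <= log::max_level() {
+///     log::logger().log(
+///         &log::Record::builder()
+///             .args(format_args!("a manually built record"))
+///             .level(level)
+///             .build(),
+///     );
+/// }
+/// ```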
+pub const STATIC_MAX_LEVEL: LevelFilter = match cfg!(debug_assertions) {
+ false if cfg!(feature = "release_max_level_off") => LevelFilter::Off,
+ false if cfg!(feature = "release_max_level_error") => LevelFilter::Error,
+ false if cfg!(feature = "release_max_level_warn") => LevelFilter::Warn,
+ false if cfg!(feature = "release_max_level_info") => LevelFilter::Info,
+ false if cfg!(feature = "release_max_level_debug") => LevelFilter::Debug,
+ false if cfg!(feature = "release_max_level_trace") => LevelFilter::Trace,
+ _ if cfg!(feature = "max_level_off") => LevelFilter::Off,
+ _ if cfg!(feature = "max_level_error") => LevelFilter::Error,
+ _ if cfg!(feature = "max_level_warn") => LevelFilter::Warn,
+ _ if cfg!(feature = "max_level_info") => LevelFilter::Info,
+ _ if cfg!(feature = "max_level_debug") => LevelFilter::Debug,
+ _ => LevelFilter::Trace,
+};
+
+#[cfg(test)]
+mod tests {
+ use super::{Level, LevelFilter, ParseLevelError, STATIC_MAX_LEVEL};
+
+ #[test]
+ fn test_levelfilter_from_str() {
+ let tests = [
+ ("off", Ok(LevelFilter::Off)),
+ ("error", Ok(LevelFilter::Error)),
+ ("warn", Ok(LevelFilter::Warn)),
+ ("info", Ok(LevelFilter::Info)),
+ ("debug", Ok(LevelFilter::Debug)),
+ ("trace", Ok(LevelFilter::Trace)),
+ ("OFF", Ok(LevelFilter::Off)),
+ ("ERROR", Ok(LevelFilter::Error)),
+ ("WARN", Ok(LevelFilter::Warn)),
+ ("INFO", Ok(LevelFilter::Info)),
+ ("DEBUG", Ok(LevelFilter::Debug)),
+ ("TRACE", Ok(LevelFilter::Trace)),
+ ("asdf", Err(ParseLevelError(()))),
+ ];
+ for &(s, ref expected) in &tests {
+ assert_eq!(expected, &s.parse());
+ }
+ }
+
+ #[test]
+ fn test_level_from_str() {
+ let tests = [
+ ("OFF", Err(ParseLevelError(()))),
+ ("error", Ok(Level::Error)),
+ ("warn", Ok(Level::Warn)),
+ ("info", Ok(Level::Info)),
+ ("debug", Ok(Level::Debug)),
+ ("trace", Ok(Level::Trace)),
+ ("ERROR", Ok(Level::Error)),
+ ("WARN", Ok(Level::Warn)),
+ ("INFO", Ok(Level::Info)),
+ ("DEBUG", Ok(Level::Debug)),
+ ("TRACE", Ok(Level::Trace)),
+ ("asdf", Err(ParseLevelError(()))),
+ ];
+ for &(s, ref expected) in &tests {
+ assert_eq!(expected, &s.parse());
+ }
+ }
+
+ #[test]
+ fn test_level_as_str() {
+ let tests = &[
+ (Level::Error, "ERROR"),
+ (Level::Warn, "WARN"),
+ (Level::Info, "INFO"),
+ (Level::Debug, "DEBUG"),
+ (Level::Trace, "TRACE"),
+ ];
+ for (input, expected) in tests {
+ assert_eq!(*expected, input.as_str());
+ }
+ }
+
+ #[test]
+ fn test_level_show() {
+ assert_eq!("INFO", Level::Info.to_string());
+ assert_eq!("ERROR", Level::Error.to_string());
+ }
+
+ #[test]
+ fn test_levelfilter_show() {
+ assert_eq!("OFF", LevelFilter::Off.to_string());
+ assert_eq!("ERROR", LevelFilter::Error.to_string());
+ }
+
+ #[test]
+ fn test_cross_cmp() {
+ assert!(Level::Debug > LevelFilter::Error);
+ assert!(LevelFilter::Warn < Level::Trace);
+ assert!(LevelFilter::Off < Level::Error);
+ }
+
+ #[test]
+ fn test_cross_eq() {
+ assert!(Level::Error == LevelFilter::Error);
+ assert!(LevelFilter::Off != Level::Error);
+ assert!(Level::Trace == LevelFilter::Trace);
+ }
+
+ #[test]
+ fn test_to_level() {
+ assert_eq!(Some(Level::Error), LevelFilter::Error.to_level());
+ assert_eq!(None, LevelFilter::Off.to_level());
+ assert_eq!(Some(Level::Debug), LevelFilter::Debug.to_level());
+ }
+
+ #[test]
+ fn test_to_level_filter() {
+ assert_eq!(LevelFilter::Error, Level::Error.to_level_filter());
+ assert_eq!(LevelFilter::Trace, Level::Trace.to_level_filter());
+ }
+
+ #[test]
+ fn test_level_filter_as_str() {
+ let tests = &[
+ (LevelFilter::Off, "OFF"),
+ (LevelFilter::Error, "ERROR"),
+ (LevelFilter::Warn, "WARN"),
+ (LevelFilter::Info, "INFO"),
+ (LevelFilter::Debug, "DEBUG"),
+ (LevelFilter::Trace, "TRACE"),
+ ];
+ for (input, expected) in tests {
+ assert_eq!(*expected, input.as_str());
+ }
+ }
+
+ #[test]
+ #[cfg_attr(not(debug_assertions), ignore)]
+ fn test_static_max_level_debug() {
+ if cfg!(feature = "max_level_off") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Off);
+ } else if cfg!(feature = "max_level_error") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Error);
+ } else if cfg!(feature = "max_level_warn") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Warn);
+ } else if cfg!(feature = "max_level_info") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Info);
+ } else if cfg!(feature = "max_level_debug") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Debug);
+ } else {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);
+ }
+ }
+
+ #[test]
+ #[cfg_attr(debug_assertions, ignore)]
+ fn test_static_max_level_release() {
+ if cfg!(feature = "release_max_level_off") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Off);
+ } else if cfg!(feature = "release_max_level_error") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Error);
+ } else if cfg!(feature = "release_max_level_warn") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Warn);
+ } else if cfg!(feature = "release_max_level_info") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Info);
+ } else if cfg!(feature = "release_max_level_debug") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Debug);
+ } else if cfg!(feature = "release_max_level_trace") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);
+ } else if cfg!(feature = "max_level_off") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Off);
+ } else if cfg!(feature = "max_level_error") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Error);
+ } else if cfg!(feature = "max_level_warn") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Warn);
+ } else if cfg!(feature = "max_level_info") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Info);
+ } else if cfg!(feature = "max_level_debug") {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Debug);
+ } else {
+ assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);
+ }
+ }
+
+ #[test]
+ #[cfg(feature = "std")]
+ fn test_error_trait() {
+ use super::SetLoggerError;
+ let e = SetLoggerError(());
+ assert_eq!(
+ &e.to_string(),
+ "attempted to set a logger after the logging system \
+ was already initialized"
+ );
+ }
+
+ #[test]
+ fn test_metadata_builder() {
+ use super::MetadataBuilder;
+ let target = "myApp";
+ let metadata_test = MetadataBuilder::new()
+ .level(Level::Debug)
+ .target(target)
+ .build();
+ assert_eq!(metadata_test.level(), Level::Debug);
+ assert_eq!(metadata_test.target(), "myApp");
+ }
+
+ #[test]
+ fn test_metadata_convenience_builder() {
+ use super::Metadata;
+ let target = "myApp";
+ let metadata_test = Metadata::builder()
+ .level(Level::Debug)
+ .target(target)
+ .build();
+ assert_eq!(metadata_test.level(), Level::Debug);
+ assert_eq!(metadata_test.target(), "myApp");
+ }
+
+ #[test]
+ fn test_record_builder() {
+ use super::{MetadataBuilder, RecordBuilder};
+ let target = "myApp";
+ let metadata = MetadataBuilder::new().target(target).build();
+ let fmt_args = format_args!("hello");
+ let record_test = RecordBuilder::new()
+ .args(fmt_args)
+ .metadata(metadata)
+ .module_path(Some("foo"))
+ .file(Some("bar"))
+ .line(Some(30))
+ .build();
+ assert_eq!(record_test.metadata().target(), "myApp");
+ assert_eq!(record_test.module_path(), Some("foo"));
+ assert_eq!(record_test.file(), Some("bar"));
+ assert_eq!(record_test.line(), Some(30));
+ }
+
+ #[test]
+ fn test_record_convenience_builder() {
+ use super::{Metadata, Record};
+ let target = "myApp";
+ let metadata = Metadata::builder().target(target).build();
+ let fmt_args = format_args!("hello");
+ let record_test = Record::builder()
+ .args(fmt_args)
+ .metadata(metadata)
+ .module_path(Some("foo"))
+ .file(Some("bar"))
+ .line(Some(30))
+ .build();
+ assert_eq!(record_test.target(), "myApp");
+ assert_eq!(record_test.module_path(), Some("foo"));
+ assert_eq!(record_test.file(), Some("bar"));
+ assert_eq!(record_test.line(), Some(30));
+ }
+
+ #[test]
+ fn test_record_complete_builder() {
+ use super::{Level, Record};
+ let target = "myApp";
+ let record_test = Record::builder()
+ .module_path(Some("foo"))
+ .file(Some("bar"))
+ .line(Some(30))
+ .target(target)
+ .level(Level::Error)
+ .build();
+ assert_eq!(record_test.target(), "myApp");
+ assert_eq!(record_test.level(), Level::Error);
+ assert_eq!(record_test.module_path(), Some("foo"));
+ assert_eq!(record_test.file(), Some("bar"));
+ assert_eq!(record_test.line(), Some(30));
+ }
+
+ #[test]
+ #[cfg(feature = "kv")]
+ fn test_record_key_values_builder() {
+ use super::Record;
+ use crate::kv::{self, VisitSource};
+
+ struct TestVisitSource {
+ seen_pairs: usize,
+ }
+
+ impl<'kvs> VisitSource<'kvs> for TestVisitSource {
+ fn visit_pair(
+ &mut self,
+ _: kv::Key<'kvs>,
+ _: kv::Value<'kvs>,
+ ) -> Result<(), kv::Error> {
+ self.seen_pairs += 1;
+ Ok(())
+ }
+ }
+
+ let kvs: &[(&str, i32)] = &[("a", 1), ("b", 2)];
+ let record_test = Record::builder().key_values(&kvs).build();
+
+ let mut visitor = TestVisitSource { seen_pairs: 0 };
+
+ record_test.key_values().visit(&mut visitor).unwrap();
+
+ assert_eq!(2, visitor.seen_pairs);
+ }
+
+ #[test]
+ #[cfg(feature = "kv")]
+ fn test_record_key_values_get_coerce() {
+ use super::Record;
+
+ let kvs: &[(&str, &str)] = &[("a", "1"), ("b", "2")];
+ let record = Record::builder().key_values(&kvs).build();
+
+ assert_eq!(
+ "2",
+ record
+ .key_values()
+ .get("b".into())
+ .expect("missing key")
+ .to_borrowed_str()
+ .expect("invalid value")
+ );
+ }
+
+ // Test that the `impl Log for Foo` blocks work
+ // This test mostly operates on a type level, so failures will be compile errors
+ #[test]
+ fn test_foreign_impl() {
+ use super::Log;
+ #[cfg(feature = "std")]
+ use std::sync::Arc;
+
+ fn assert_is_log<T: Log + ?Sized>() {}
+
+ assert_is_log::<&dyn Log>();
+
+ #[cfg(feature = "std")]
+ assert_is_log::<Box<dyn Log>>();
+
+ #[cfg(feature = "std")]
+ assert_is_log::<Arc<dyn Log>>();
+
+ // Assert these statements for all T: Log + ?Sized
+ #[allow(unused)]
+ fn forall<T: Log + ?Sized>() {
+ #[cfg(feature = "std")]
+ assert_is_log::<Box<T>>();
+
+ assert_is_log::<&T>();
+
+ #[cfg(feature = "std")]
+ assert_is_log::<Arc<T>>();
+ }
+ }
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/macros.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/macros.rs
new file mode 100644
index 0000000..87693f2
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/macros.rs
@@ -0,0 +1,367 @@
+// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// The standard logging macro.
+///
+/// This macro will generically log with the specified `Level` and `format!`
+/// based argument list.
+///
+/// # Examples
+///
+/// ```
+/// use log::{log, Level};
+///
+/// # fn main() {
+/// let data = (42, "Forty-two");
+/// let private_data = "private";
+///
+/// log!(Level::Error, "Received errors: {}, {}", data.0, data.1);
+/// log!(target: "app_events", Level::Warn, "App warning: {}, {}, {}",
+/// data.0, data.1, private_data);
+/// # }
+/// ```
+#[macro_export]
+macro_rules! log {
+ // log!(target: "my_target", Level::Info, key1:? = 42, key2 = true; "a {} event", "log");
+ (target: $target:expr, $lvl:expr, $($key:tt $(:$capture:tt)? $(= $value:expr)?),+; $($arg:tt)+) => ({
+ let lvl = $lvl;
+ if lvl <= $crate::STATIC_MAX_LEVEL && lvl <= $crate::max_level() {
+ $crate::__private_api::log::<&_>(
+ $crate::__private_api::format_args!($($arg)+),
+ lvl,
+ &($target, $crate::__private_api::module_path!(), $crate::__private_api::loc()),
+ &[$(($crate::__log_key!($key), $crate::__log_value!($key $(:$capture)* = $($value)*))),+]
+ );
+ }
+ });
+
+ // log!(target: "my_target", Level::Info, "a {} event", "log");
+ (target: $target:expr, $lvl:expr, $($arg:tt)+) => ({
+ let lvl = $lvl;
+ if lvl <= $crate::STATIC_MAX_LEVEL && lvl <= $crate::max_level() {
+ $crate::__private_api::log(
+ $crate::__private_api::format_args!($($arg)+),
+ lvl,
+ &($target, $crate::__private_api::module_path!(), $crate::__private_api::loc()),
+ (),
+ );
+ }
+ });
+
+ // log!(Level::Info, "a log event")
+ ($lvl:expr, $($arg:tt)+) => ($crate::log!(target: $crate::__private_api::module_path!(), $lvl, $($arg)+));
+}
+
+/// Logs a message at the error level.
+///
+/// # Examples
+///
+/// ```
+/// use log::error;
+///
+/// # fn main() {
+/// let (err_info, port) = ("No connection", 22);
+///
+/// error!("Error: {err_info} on port {port}");
+/// error!(target: "app_events", "App Error: {err_info}, Port: {port}");
+/// # }
+/// ```
+#[macro_export]
+macro_rules! error {
+ // error!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+ // error!(target: "my_target", "a {} event", "log")
+ (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Error, $($arg)+));
+
+ // error!("a {} event", "log")
+ ($($arg:tt)+) => ($crate::log!($crate::Level::Error, $($arg)+))
+}
+
+/// Logs a message at the warn level.
+///
+/// # Examples
+///
+/// ```
+/// use log::warn;
+///
+/// # fn main() {
+/// let warn_description = "Invalid Input";
+///
+/// warn!("Warning! {warn_description}!");
+/// warn!(target: "input_events", "App received warning: {warn_description}");
+/// # }
+/// ```
+#[macro_export]
+macro_rules! warn {
+ // warn!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+ // warn!(target: "my_target", "a {} event", "log")
+ (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Warn, $($arg)+));
+
+ // warn!("a {} event", "log")
+ ($($arg:tt)+) => ($crate::log!($crate::Level::Warn, $($arg)+))
+}
+
+/// Logs a message at the info level.
+///
+/// # Examples
+///
+/// ```
+/// use log::info;
+///
+/// # fn main() {
+/// # struct Connection { port: u32, speed: f32 }
+/// let conn_info = Connection { port: 40, speed: 3.20 };
+///
+/// info!("Connected to port {} at {} Mb/s", conn_info.port, conn_info.speed);
+/// info!(target: "connection_events", "Successful connection, port: {}, speed: {}",
+/// conn_info.port, conn_info.speed);
+/// # }
+/// ```
+#[macro_export]
+macro_rules! info {
+ // info!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+ // info!(target: "my_target", "a {} event", "log")
+ (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Info, $($arg)+));
+
+ // info!("a {} event", "log")
+ ($($arg:tt)+) => ($crate::log!($crate::Level::Info, $($arg)+))
+}
+
+/// Logs a message at the debug level.
+///
+/// # Examples
+///
+/// ```
+/// use log::debug;
+///
+/// # fn main() {
+/// # struct Position { x: f32, y: f32 }
+/// let pos = Position { x: 3.234, y: -1.223 };
+///
+/// debug!("New position: x: {}, y: {}", pos.x, pos.y);
+/// debug!(target: "app_events", "New position: x: {}, y: {}", pos.x, pos.y);
+/// # }
+/// ```
+#[macro_export]
+macro_rules! debug {
+ // debug!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+ // debug!(target: "my_target", "a {} event", "log")
+ (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Debug, $($arg)+));
+
+ // debug!("a {} event", "log")
+ ($($arg:tt)+) => ($crate::log!($crate::Level::Debug, $($arg)+))
+}
+
+/// Logs a message at the trace level.
+///
+/// # Examples
+///
+/// ```
+/// use log::trace;
+///
+/// # fn main() {
+/// # struct Position { x: f32, y: f32 }
+/// let pos = Position { x: 3.234, y: -1.223 };
+///
+/// trace!("Position is: x: {}, y: {}", pos.x, pos.y);
+/// trace!(target: "app_events", "x is {} and y is {}",
+/// if pos.x >= 0.0 { "positive" } else { "negative" },
+/// if pos.y >= 0.0 { "positive" } else { "negative" });
+/// # }
+/// ```
+#[macro_export]
+macro_rules! trace {
+ // trace!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+ // trace!(target: "my_target", "a {} event", "log")
+ (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Trace, $($arg)+));
+
+ // trace!("a {} event", "log")
+ ($($arg:tt)+) => ($crate::log!($crate::Level::Trace, $($arg)+))
+}
+
+/// Determines if a message logged at the specified level in that module will
+/// be logged.
+///
+/// This can be used to avoid expensive computation of log message arguments if
+/// the message would be ignored anyway.
+///
+/// # Examples
+///
+/// ```
+/// use log::Level::Debug;
+/// use log::{debug, log_enabled};
+///
+/// # fn foo() {
+/// if log_enabled!(Debug) {
+/// let data = expensive_call();
+/// debug!("expensive debug data: {} {}", data.x, data.y);
+/// }
+/// if log_enabled!(target: "Global", Debug) {
+/// let data = expensive_call();
+/// debug!(target: "Global", "expensive debug data: {} {}", data.x, data.y);
+/// }
+/// # }
+/// # struct Data { x: u32, y: u32 }
+/// # fn expensive_call() -> Data { Data { x: 0, y: 0 } }
+/// # fn main() {}
+/// ```
+#[macro_export]
+macro_rules! log_enabled {
+ (target: $target:expr, $lvl:expr) => {{
+ let lvl = $lvl;
+ lvl <= $crate::STATIC_MAX_LEVEL
+ && lvl <= $crate::max_level()
+ && $crate::__private_api::enabled(lvl, $target)
+ }};
+ ($lvl:expr) => {
+ $crate::log_enabled!(target: $crate::__private_api::module_path!(), $lvl)
+ };
+}
+
+// These macros use a pattern of #[cfg]s to produce nicer error
+// messages when log features aren't available
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv")]
+macro_rules! __log_key {
+ // key1 = 42
+ ($($args:ident)*) => {
+ $crate::__private_api::stringify!($($args)*)
+ };
+ // "key1" = 42
+ ($($args:expr)*) => {
+ $($args)*
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv"))]
+macro_rules! __log_key {
+ ($($args:tt)*) => {
+ compile_error!("key value support requires the `kv` feature of `log`")
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv")]
+macro_rules! __log_value {
+ // Entrypoint
+ ($key:tt = $args:expr) => {
+ $crate::__log_value!(($args):value)
+ };
+ ($key:tt :$capture:tt = $args:expr) => {
+ $crate::__log_value!(($args):$capture)
+ };
+ ($key:ident =) => {
+ $crate::__log_value!(($key):value)
+ };
+ ($key:ident :$capture:tt =) => {
+ $crate::__log_value!(($key):$capture)
+ };
+ // ToValue
+ (($args:expr):value) => {
+ $crate::__private_api::capture_to_value(&&$args)
+ };
+ // Debug
+ (($args:expr):?) => {
+ $crate::__private_api::capture_debug(&&$args)
+ };
+ (($args:expr):debug) => {
+ $crate::__private_api::capture_debug(&&$args)
+ };
+ // Display
+ (($args:expr):%) => {
+ $crate::__private_api::capture_display(&&$args)
+ };
+ (($args:expr):display) => {
+ $crate::__private_api::capture_display(&&$args)
+ };
+ //Error
+ (($args:expr):err) => {
+ $crate::__log_value_error!($args)
+ };
+ // sval::Value
+ (($args:expr):sval) => {
+ $crate::__log_value_sval!($args)
+ };
+ // serde::Serialize
+ (($args:expr):serde) => {
+ $crate::__log_value_serde!($args)
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv"))]
+macro_rules! __log_value {
+ ($($args:tt)*) => {
+ compile_error!("key value support requires the `kv` feature of `log`")
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv_sval")]
+macro_rules! __log_value_sval {
+ ($args:expr) => {
+ $crate::__private_api::capture_sval(&&$args)
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv_sval"))]
+macro_rules! __log_value_sval {
+ ($args:expr) => {
+ compile_error!("capturing values as `sval::Value` requites the `kv_sval` feature of `log`")
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv_serde")]
+macro_rules! __log_value_serde {
+ ($args:expr) => {
+ $crate::__private_api::capture_serde(&&$args)
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv_serde"))]
+macro_rules! __log_value_serde {
+ ($args:expr) => {
+ compile_error!(
+ "capturing values as `serde::Serialize` requites the `kv_serde` feature of `log`"
+ )
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv_std")]
+macro_rules! __log_value_error {
+ ($args:expr) => {
+ $crate::__private_api::capture_error(&$args)
+ };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv_std"))]
+macro_rules! __log_value_error {
+ ($args:expr) => {
+ compile_error!(
+ "capturing values as `std::error::Error` requites the `kv_std` feature of `log`"
+ )
+ };
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/serde.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/serde.rs
new file mode 100644
index 0000000..63bef7f
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/src/serde.rs
@@ -0,0 +1,397 @@
+#![cfg(feature = "serde")]
+
+use serde::de::{
+ Deserialize, DeserializeSeed, Deserializer, EnumAccess, Error, Unexpected, VariantAccess,
+ Visitor,
+};
+use serde::ser::{Serialize, Serializer};
+
+use crate::{Level, LevelFilter, LOG_LEVEL_NAMES};
+
+use std::fmt;
+use std::str::{self, FromStr};
+
+// The Deserialize impls are handwritten to be case insensitive using FromStr.
+
+impl Serialize for Level {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ match *self {
+ Level::Error => serializer.serialize_unit_variant("Level", 0, "ERROR"),
+ Level::Warn => serializer.serialize_unit_variant("Level", 1, "WARN"),
+ Level::Info => serializer.serialize_unit_variant("Level", 2, "INFO"),
+ Level::Debug => serializer.serialize_unit_variant("Level", 3, "DEBUG"),
+ Level::Trace => serializer.serialize_unit_variant("Level", 4, "TRACE"),
+ }
+ }
+}
+
+impl<'de> Deserialize<'de> for Level {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ struct LevelIdentifier;
+
+ impl<'de> Visitor<'de> for LevelIdentifier {
+ type Value = Level;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("log level")
+ }
+
+ fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
+ where
+ E: Error,
+ {
+ let variant = LOG_LEVEL_NAMES[1..]
+ .get(v as usize)
+ .ok_or_else(|| Error::invalid_value(Unexpected::Unsigned(v), &self))?;
+
+ self.visit_str(variant)
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+ where
+ E: Error,
+ {
+ // Case insensitive.
+ FromStr::from_str(s).map_err(|_| Error::unknown_variant(s, &LOG_LEVEL_NAMES[1..]))
+ }
+
+ fn visit_bytes<E>(self, value: &[u8]) -> Result<Self::Value, E>
+ where
+ E: Error,
+ {
+ let variant = str::from_utf8(value)
+ .map_err(|_| Error::invalid_value(Unexpected::Bytes(value), &self))?;
+
+ self.visit_str(variant)
+ }
+ }
+
+ impl<'de> DeserializeSeed<'de> for LevelIdentifier {
+ type Value = Level;
+
+ fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ deserializer.deserialize_identifier(LevelIdentifier)
+ }
+ }
+
+ struct LevelEnum;
+
+ impl<'de> Visitor<'de> for LevelEnum {
+ type Value = Level;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("log level")
+ }
+
+ fn visit_enum<A>(self, value: A) -> Result<Self::Value, A::Error>
+ where
+ A: EnumAccess<'de>,
+ {
+ let (level, variant) = value.variant_seed(LevelIdentifier)?;
+ // Every variant is a unit variant.
+ variant.unit_variant()?;
+ Ok(level)
+ }
+ }
+
+ deserializer.deserialize_enum("Level", &LOG_LEVEL_NAMES[1..], LevelEnum)
+ }
+}
+
+impl Serialize for LevelFilter {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ match *self {
+ LevelFilter::Off => serializer.serialize_unit_variant("LevelFilter", 0, "OFF"),
+ LevelFilter::Error => serializer.serialize_unit_variant("LevelFilter", 1, "ERROR"),
+ LevelFilter::Warn => serializer.serialize_unit_variant("LevelFilter", 2, "WARN"),
+ LevelFilter::Info => serializer.serialize_unit_variant("LevelFilter", 3, "INFO"),
+ LevelFilter::Debug => serializer.serialize_unit_variant("LevelFilter", 4, "DEBUG"),
+ LevelFilter::Trace => serializer.serialize_unit_variant("LevelFilter", 5, "TRACE"),
+ }
+ }
+}
+
+impl<'de> Deserialize<'de> for LevelFilter {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ struct LevelFilterIdentifier;
+
+ impl<'de> Visitor<'de> for LevelFilterIdentifier {
+ type Value = LevelFilter;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("log level filter")
+ }
+
+ fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
+ where
+ E: Error,
+ {
+ let variant = LOG_LEVEL_NAMES
+ .get(v as usize)
+ .ok_or_else(|| Error::invalid_value(Unexpected::Unsigned(v), &self))?;
+
+ self.visit_str(variant)
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+ where
+ E: Error,
+ {
+ // Case insensitive.
+ FromStr::from_str(s).map_err(|_| Error::unknown_variant(s, &LOG_LEVEL_NAMES))
+ }
+
+ fn visit_bytes<E>(self, value: &[u8]) -> Result<Self::Value, E>
+ where
+ E: Error,
+ {
+ let variant = str::from_utf8(value)
+ .map_err(|_| Error::invalid_value(Unexpected::Bytes(value), &self))?;
+
+ self.visit_str(variant)
+ }
+ }
+
+ impl<'de> DeserializeSeed<'de> for LevelFilterIdentifier {
+ type Value = LevelFilter;
+
+ fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ deserializer.deserialize_identifier(LevelFilterIdentifier)
+ }
+ }
+
+ struct LevelFilterEnum;
+
+ impl<'de> Visitor<'de> for LevelFilterEnum {
+ type Value = LevelFilter;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("log level filter")
+ }
+
+ fn visit_enum<A>(self, value: A) -> Result<Self::Value, A::Error>
+ where
+ A: EnumAccess<'de>,
+ {
+ let (level_filter, variant) = value.variant_seed(LevelFilterIdentifier)?;
+ // Every variant is a unit variant.
+ variant.unit_variant()?;
+ Ok(level_filter)
+ }
+ }
+
+ deserializer.deserialize_enum("LevelFilter", &LOG_LEVEL_NAMES, LevelFilterEnum)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{Level, LevelFilter};
+ use serde_test::{assert_de_tokens, assert_de_tokens_error, assert_tokens, Token};
+
+ fn level_token(variant: &'static str) -> Token {
+ Token::UnitVariant {
+ name: "Level",
+ variant,
+ }
+ }
+
+ fn level_bytes_tokens(variant: &'static [u8]) -> [Token; 3] {
+ [
+ Token::Enum { name: "Level" },
+ Token::Bytes(variant),
+ Token::Unit,
+ ]
+ }
+
+ fn level_variant_tokens(variant: u32) -> [Token; 3] {
+ [
+ Token::Enum { name: "Level" },
+ Token::U32(variant),
+ Token::Unit,
+ ]
+ }
+
+ fn level_filter_token(variant: &'static str) -> Token {
+ Token::UnitVariant {
+ name: "LevelFilter",
+ variant,
+ }
+ }
+
+ fn level_filter_bytes_tokens(variant: &'static [u8]) -> [Token; 3] {
+ [
+ Token::Enum {
+ name: "LevelFilter",
+ },
+ Token::Bytes(variant),
+ Token::Unit,
+ ]
+ }
+
+ fn level_filter_variant_tokens(variant: u32) -> [Token; 3] {
+ [
+ Token::Enum {
+ name: "LevelFilter",
+ },
+ Token::U32(variant),
+ Token::Unit,
+ ]
+ }
+
+ #[test]
+ fn test_level_ser_de() {
+ let cases = &[
+ (Level::Error, [level_token("ERROR")]),
+ (Level::Warn, [level_token("WARN")]),
+ (Level::Info, [level_token("INFO")]),
+ (Level::Debug, [level_token("DEBUG")]),
+ (Level::Trace, [level_token("TRACE")]),
+ ];
+
+ for (s, expected) in cases {
+ assert_tokens(s, expected);
+ }
+ }
+
+ #[test]
+ fn test_level_case_insensitive() {
+ let cases = &[
+ (Level::Error, [level_token("error")]),
+ (Level::Warn, [level_token("warn")]),
+ (Level::Info, [level_token("info")]),
+ (Level::Debug, [level_token("debug")]),
+ (Level::Trace, [level_token("trace")]),
+ ];
+
+ for (s, expected) in cases {
+ assert_de_tokens(s, expected);
+ }
+ }
+
+ #[test]
+ fn test_level_de_bytes() {
+ let cases = &[
+ (Level::Error, level_bytes_tokens(b"ERROR")),
+ (Level::Warn, level_bytes_tokens(b"WARN")),
+ (Level::Info, level_bytes_tokens(b"INFO")),
+ (Level::Debug, level_bytes_tokens(b"DEBUG")),
+ (Level::Trace, level_bytes_tokens(b"TRACE")),
+ ];
+
+ for (value, tokens) in cases {
+ assert_de_tokens(value, tokens);
+ }
+ }
+
+ #[test]
+ fn test_level_de_variant_index() {
+ let cases = &[
+ (Level::Error, level_variant_tokens(0)),
+ (Level::Warn, level_variant_tokens(1)),
+ (Level::Info, level_variant_tokens(2)),
+ (Level::Debug, level_variant_tokens(3)),
+ (Level::Trace, level_variant_tokens(4)),
+ ];
+
+ for (value, tokens) in cases {
+ assert_de_tokens(value, tokens);
+ }
+ }
+
+ #[test]
+ fn test_level_de_error() {
+ let msg = "unknown variant `errorx`, expected one of \
+ `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`";
+ assert_de_tokens_error::<Level>(&[level_token("errorx")], msg);
+ }
+
+ #[test]
+ fn test_level_filter_ser_de() {
+ let cases = &[
+ (LevelFilter::Off, [level_filter_token("OFF")]),
+ (LevelFilter::Error, [level_filter_token("ERROR")]),
+ (LevelFilter::Warn, [level_filter_token("WARN")]),
+ (LevelFilter::Info, [level_filter_token("INFO")]),
+ (LevelFilter::Debug, [level_filter_token("DEBUG")]),
+ (LevelFilter::Trace, [level_filter_token("TRACE")]),
+ ];
+
+ for (s, expected) in cases {
+ assert_tokens(s, expected);
+ }
+ }
+
+ #[test]
+ fn test_level_filter_case_insensitive() {
+ let cases = &[
+ (LevelFilter::Off, [level_filter_token("off")]),
+ (LevelFilter::Error, [level_filter_token("error")]),
+ (LevelFilter::Warn, [level_filter_token("warn")]),
+ (LevelFilter::Info, [level_filter_token("info")]),
+ (LevelFilter::Debug, [level_filter_token("debug")]),
+ (LevelFilter::Trace, [level_filter_token("trace")]),
+ ];
+
+ for (s, expected) in cases {
+ assert_de_tokens(s, expected);
+ }
+ }
+
+ #[test]
+ fn test_level_filter_de_bytes() {
+ let cases = &[
+ (LevelFilter::Off, level_filter_bytes_tokens(b"OFF")),
+ (LevelFilter::Error, level_filter_bytes_tokens(b"ERROR")),
+ (LevelFilter::Warn, level_filter_bytes_tokens(b"WARN")),
+ (LevelFilter::Info, level_filter_bytes_tokens(b"INFO")),
+ (LevelFilter::Debug, level_filter_bytes_tokens(b"DEBUG")),
+ (LevelFilter::Trace, level_filter_bytes_tokens(b"TRACE")),
+ ];
+
+ for (value, tokens) in cases {
+ assert_de_tokens(value, tokens);
+ }
+ }
+
+ #[test]
+ fn test_level_filter_de_variant_index() {
+ let cases = &[
+ (LevelFilter::Off, level_filter_variant_tokens(0)),
+ (LevelFilter::Error, level_filter_variant_tokens(1)),
+ (LevelFilter::Warn, level_filter_variant_tokens(2)),
+ (LevelFilter::Info, level_filter_variant_tokens(3)),
+ (LevelFilter::Debug, level_filter_variant_tokens(4)),
+ (LevelFilter::Trace, level_filter_variant_tokens(5)),
+ ];
+
+ for (value, tokens) in cases {
+ assert_de_tokens(value, tokens);
+ }
+ }
+
+ #[test]
+ fn test_level_filter_de_error() {
+ let msg = "unknown variant `errorx`, expected one of \
+ `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`";
+ assert_de_tokens_error::<LevelFilter>(&[level_filter_token("errorx")], msg);
+ }
+}
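
The handwritten impls above serialize `Level` to its upper-case variant name and accept any casing on deserialization. A minimal usage sketch, assuming the crate is built with its `serde` feature and that `serde_json` is used as the data format (an assumption for illustration, not part of this diff):

    use log::Level;

    fn main() {
        // Serialization emits the upper-case unit-variant name.
        assert_eq!(serde_json::to_string(&Level::Warn).unwrap(), "\"WARN\"");

        // Deserialization is case-insensitive, courtesy of the handwritten Visitor.
        let level: Level = serde_json::from_str("\"info\"").unwrap();
        assert_eq!(level, Level::Info);
    }
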
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/triagebot.toml b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/triagebot.toml
new file mode 100644
index 0000000..fa0824a
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/log/triagebot.toml
@@ -0,0 +1 @@
+[assign]
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/.cargo-checksum.json b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/.cargo-checksum.json
new file mode 100644
index 0000000..bcbce9e
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"d4ff1d54f23cde7d35174635e8db5e6aee0d61b846d2fccbf0d28146246b3a28","README.md":"fc6752d50a57acbf3d65581c0d2f3d0da8c5fef5735a12857660c9e9687751d0","src/facts.rs":"9fad471b6ba5f63b8a1367002fee2b27fc3b3b893680eca390bc2616718f8915","src/lib.rs":"19ef0fd2d054b3a48c11ff4007734b7940ca739d3a9d5083d3a03b4d982cdb99","src/output/datafrog_opt.rs":"c75fa04ed1cc1a5b59f9405ce959af5950d37ae57876cc9510adc7c013b25af5","src/output/initialization.rs":"b9665c1397ff5e1cc1a93e9645bec0bed672ea9822c4dd32fc545ecbd3f80258","src/output/liveness.rs":"b68c9edd17feebff7d0b006caaf8197b5c30320b1a8cdcbe653bd7218954dd4f","src/output/location_insensitive.rs":"eb7c495ec38768104b8877de66341aaca210cdad824cae948f3fd7cf4ba858d0","src/output/mod.rs":"968f8547954a4444f59f3c056a9b742aa59ede3a90bb9b6fe08ba506fcc6bce5","src/output/naive.rs":"b345c2beb8a2f79bc482d131954bea4f23e53a3ce8270f64b8e940f0de376730"},"package":"c4e8e505342045d397d0b6674dcb82d6faf5cf40484d30eeb88fc82ef14e903f"} \ No newline at end of file
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/Cargo.toml b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/Cargo.toml
new file mode 100644
index 0000000..e3a8f74
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/Cargo.toml
@@ -0,0 +1,29 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "polonius-engine"
+version = "0.13.0"
+authors = ["The Rust Project Developers", "Polonius Developers"]
+description = "Core definition for the Rust borrow checker"
+readme = "README.md"
+keywords = ["compiler", "borrowck", "datalog"]
+license = "Apache-2.0/MIT"
+repository = "https://github.com/rust-lang-nursery/polonius"
+[dependencies.datafrog]
+version = "2.0.0"
+
+[dependencies.log]
+version = "0.4"
+
+[dependencies.rustc-hash]
+version = "1.0.0"
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/README.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/README.md
new file mode 100644
index 0000000..d88295e
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/README.md
@@ -0,0 +1,6 @@
+This is a core library that models the borrow check. It implements the
+analysis [described in this blogpost][post]. This library is intended
+for use both by rustc and by the polonius crate, which is a distinct
+front-end intended for testing, profiling, etc.
+
+[post]: http://smallcultfollowing.com/babysteps/blog/2018/04/27/an-alias-based-formulation-of-the-borrow-checker/
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/facts.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/facts.rs
new file mode 100644
index 0000000..442ba18
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/facts.rs
@@ -0,0 +1,129 @@
+use std::fmt::Debug;
+use std::hash::Hash;
+
+/// The "facts" which are the basis of the NLL borrow analysis.
+#[derive(Clone, Debug)]
+pub struct AllFacts<T: FactTypes> {
+ /// `loan_issued_at(origin, loan, point)` indicates that the `loan` was "issued"
+ /// at the given `point`, creating a reference with the `origin`.
+ /// Effectively, `origin` may refer to data from `loan` starting at `point` (this is usually
+ /// the point *after* a borrow rvalue).
+ pub loan_issued_at: Vec<(T::Origin, T::Loan, T::Point)>,
+
+ /// `universal_region(origin)` -- this is a "free region" within the fn body
+ pub universal_region: Vec<T::Origin>,
+
+ /// `cfg_edge(point1, point2)` for each edge `point1 -> point2` in the control flow
+ pub cfg_edge: Vec<(T::Point, T::Point)>,
+
+ /// `loan_killed_at(loan, point)` when some prefix of the path borrowed at `loan`
+ /// is assigned at `point`.
+ /// Indicates that the path borrowed by the `loan` has changed in some way that the loan no
+ /// longer needs to be tracked. (In particular, mutations to the path that was borrowed
+ /// no longer invalidate the loan)
+ pub loan_killed_at: Vec<(T::Loan, T::Point)>,
+
+ /// `subset_base(origin1, origin2, point)` when we require `origin1@point: origin2@point`.
+ /// Indicates that `origin1 <= origin2` -- i.e., the set of loans in `origin1` are a subset
+ /// of those in `origin2`.
+ pub subset_base: Vec<(T::Origin, T::Origin, T::Point)>,
+
+ /// `loan_invalidated_at(point, loan)` indicates that the `loan` is invalidated by some action
+ /// taking place at `point`; if any origin that references this loan is live, this is an error.
+ pub loan_invalidated_at: Vec<(T::Point, T::Loan)>,
+
+ /// `var_used_at(var, point)` when the variable `var` is used for anything
+ /// but a drop at `point`
+ pub var_used_at: Vec<(T::Variable, T::Point)>,
+
+ /// `var_defined_at(var, point)` when the variable `var` is overwritten at `point`
+ pub var_defined_at: Vec<(T::Variable, T::Point)>,
+
+ /// `var_dropped_at(var, point)` when the variable `var` is used in a drop at `point`
+ pub var_dropped_at: Vec<(T::Variable, T::Point)>,
+
+ /// `use_of_var_derefs_origin(variable, origin)`: References with the given
+ /// `origin` may be dereferenced when the `variable` is used.
+ ///
+ /// In rustc, we generate this whenever the type of the variable includes the
+ /// given origin.
+ pub use_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+
+ /// `drop_of_var_derefs_origin(var, origin)` when the type of `var` includes
+ /// the `origin` and uses it when dropping
+ pub drop_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+
+ /// `child_path(child, parent)` when the path `child` is the direct child of
+ /// `parent`, e.g. `child_path(x.y, x)`, but not `child_path(x.y.z, x)`.
+ pub child_path: Vec<(T::Path, T::Path)>,
+
+ /// `path_is_var(path, var)` when the root path `path` starts in the variable `var`.
+ pub path_is_var: Vec<(T::Path, T::Variable)>,
+
+ /// `path_assigned_at_base(path, point)` when the `path` was initialized at point
+ /// `point`. This fact is only emitted for a prefix `path`, and not for the
+ /// implicit initialization of all of `path`'s children. E.g. a statement like
+ /// `x.y = 3` at `point` would give the fact `path_assigned_at_base(x.y, point)` (but
+ /// neither `path_assigned_at_base(x.y.z, point)` nor `path_assigned_at_base(x, point)`).
+ pub path_assigned_at_base: Vec<(T::Path, T::Point)>,
+
+ /// `path_moved_at_base(path, point)` when the `path` was moved at `point`. The
+ /// same logic is applied as for `path_assigned_at_base` above.
+ pub path_moved_at_base: Vec<(T::Path, T::Point)>,
+
+ /// `path_accessed_at_base(path, point)` when the `path` was accessed at point
+ /// `point`. The same logic as for `path_assigned_at_base` and `path_moved_at_base` applies.
+ pub path_accessed_at_base: Vec<(T::Path, T::Point)>,
+
+ /// These reflect the `'a: 'b` relations that are either declared by the user on function
+ /// declarations or which are inferred via implied bounds.
+ /// For example: `fn foo<'a, 'b: 'a, 'c>(x: &'c &'a u32)` would have two entries:
+ /// - one for the user-supplied subset `'b: 'a`
+ /// - and one for the `'a: 'c` implied bound from the `x` parameter,
+ /// (note that the transitive relation `'b: 'c` is not necessarily included
+ /// explicitly, but rather inferred by polonius).
+ pub known_placeholder_subset: Vec<(T::Origin, T::Origin)>,
+
+ /// `placeholder(origin, loan)` describes a placeholder `origin`, with its associated
+ /// placeholder `loan`.
+ pub placeholder: Vec<(T::Origin, T::Loan)>,
+}
+
+impl<T: FactTypes> Default for AllFacts<T> {
+ fn default() -> Self {
+ AllFacts {
+ loan_issued_at: Vec::default(),
+ universal_region: Vec::default(),
+ cfg_edge: Vec::default(),
+ loan_killed_at: Vec::default(),
+ subset_base: Vec::default(),
+ loan_invalidated_at: Vec::default(),
+ var_used_at: Vec::default(),
+ var_defined_at: Vec::default(),
+ var_dropped_at: Vec::default(),
+ use_of_var_derefs_origin: Vec::default(),
+ drop_of_var_derefs_origin: Vec::default(),
+ child_path: Vec::default(),
+ path_is_var: Vec::default(),
+ path_assigned_at_base: Vec::default(),
+ path_moved_at_base: Vec::default(),
+ path_accessed_at_base: Vec::default(),
+ known_placeholder_subset: Vec::default(),
+ placeholder: Vec::default(),
+ }
+ }
+}
+
+pub trait Atom:
+ From<usize> + Into<usize> + Copy + Clone + Debug + Eq + Ord + Hash + 'static
+{
+ fn index(self) -> usize;
+}
+
+pub trait FactTypes: Copy + Clone + Debug {
+ type Origin: Atom;
+ type Loan: Atom;
+ type Point: Atom;
+ type Variable: Atom;
+ type Path: Atom;
+}
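
A client of `polonius-engine` instantiates `AllFacts` by providing index newtypes that implement `Atom` plus a marker type implementing `FactTypes`. A sketch of that wiring under hypothetical names (none of these types appear in the vendored sources):

    use polonius_engine::{AllFacts, Atom, FactTypes};

    // Tiny helper to stamp out usize-backed index newtypes satisfying `Atom`.
    macro_rules! index_type {
        ($name:ident) => {
            #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
            struct $name(usize);
            impl From<usize> for $name {
                fn from(i: usize) -> Self { $name(i) }
            }
            impl From<$name> for usize {
                fn from(v: $name) -> usize { v.0 }
            }
            impl Atom for $name {
                fn index(self) -> usize { self.0 }
            }
        };
    }

    index_type!(Origin);
    index_type!(Loan);
    index_type!(Point);
    index_type!(Variable);
    index_type!(Path);

    #[derive(Copy, Clone, Debug)]
    struct MyFactTypes;

    impl FactTypes for MyFactTypes {
        type Origin = Origin;
        type Loan = Loan;
        type Point = Point;
        type Variable = Variable;
        type Path = Path;
    }

    fn main() {
        let mut facts: AllFacts<MyFactTypes> = AllFacts::default();
        // loan_issued_at(origin, loan, point): origin 0 is created by loan 0 at point 0.
        facts.loan_issued_at.push((Origin(0), Loan(0), Point(0)));
        facts.cfg_edge.push((Point(0), Point(1)));
    }
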
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/lib.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/lib.rs
new file mode 100644
index 0000000..0926be8
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/lib.rs
@@ -0,0 +1,16 @@
+/// Contains the core of the Polonius borrow checking engine.
+/// Input is fed in via AllFacts, and outputs are returned via Output
+extern crate datafrog;
+#[macro_use]
+extern crate log;
+extern crate rustc_hash;
+
+mod facts;
+mod output;
+
+// Reexports of facts
+pub use facts::AllFacts;
+pub use facts::Atom;
+pub use facts::FactTypes;
+pub use output::Algorithm;
+pub use output::Output;
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/datafrog_opt.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/datafrog_opt.rs
new file mode 100644
index 0000000..da9c343
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/datafrog_opt.rs
@@ -0,0 +1,495 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{Context, Output};
+
+pub(super) fn compute<T: FactTypes>(
+ ctx: &Context<'_, T>,
+ result: &mut Output<T>,
+) -> (
+ Relation<(T::Loan, T::Point)>,
+ Relation<(T::Origin, T::Origin, T::Point)>,
+) {
+ let timer = Instant::now();
+
+ let (errors, subset_errors) = {
+ // Static inputs
+ let origin_live_on_entry_rel = &ctx.origin_live_on_entry;
+ let cfg_edge_rel = &ctx.cfg_edge;
+ let loan_killed_at = &ctx.loan_killed_at;
+ let known_placeholder_subset = &ctx.known_placeholder_subset;
+ let placeholder_origin = &ctx.placeholder_origin;
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // `loan_invalidated_at` facts, stored ready for joins
+ let loan_invalidated_at =
+ iteration.variable::<((T::Loan, T::Point), ())>("loan_invalidated_at");
+
+ // we need `origin_live_on_entry` in both variable and relation forms,
+ // (respectively, for join and antijoin).
+ let origin_live_on_entry_var =
+ iteration.variable::<((T::Origin, T::Point), ())>("origin_live_on_entry");
+
+ // `loan_issued_at` input but organized for join
+ let loan_issued_at_op =
+ iteration.variable::<((T::Origin, T::Point), T::Loan)>("loan_issued_at_op");
+
+ // .decl subset(origin1, origin2, point)
+ //
+ // Indicates that `origin1: origin2` at `point`.
+ let subset_o1p = iteration.variable::<((T::Origin, T::Point), T::Origin)>("subset_o1p");
+
+ // .decl origin_contains_loan_on_entry(origin, loan, point)
+ //
+ // At `point`, things with `origin` may depend on data from `loan`.
+ let origin_contains_loan_on_entry_op = iteration
+ .variable::<((T::Origin, T::Point), T::Loan)>("origin_contains_loan_on_entry_op");
+
+ // .decl loan_live_at(loan, point)
+ //
+ // True if the restrictions of the `loan` need to be enforced at `point`.
+ let loan_live_at = iteration.variable::<((T::Loan, T::Point), ())>("loan_live_at");
+
+ // .decl live_to_dying_regions(origin1, origin2, point1, point2)
+ //
+ // The origins `origin1` and `origin2` are "live to dead"
+ // on the edge `point1 -> point2` if:
+ //
+ // - In `point1`, `origin1` <= `origin2`
+ // - In `point2`, `origin1` is live but `origin2` is dead.
+ //
+ // In that case, `point2` would like to add all the
+ // live things reachable from `origin2` to `origin1`.
+ //
+ let live_to_dying_regions_o2pq = iteration
+ .variable::<((T::Origin, T::Point, T::Point), T::Origin)>("live_to_dying_regions_o2pq");
+
+ // .decl dying_region_requires((origin, point1, point2), loan)
+ //
+ // The `origin` requires `loan`, but the `origin` goes dead
+ // along the edge `point1 -> point2`.
+ let dying_region_requires = iteration
+ .variable::<((T::Origin, T::Point, T::Point), T::Loan)>("dying_region_requires");
+
+ // .decl dying_can_reach_origins(origin, point1, point2)
+ //
+ // Contains dead origins where we are interested
+ // in computing the transitive closure of things they
+ // can reach.
+ //
+ // FIXME: this relation was named before renaming the `regions` atoms to `origins`, and
+ // will need to be renamed to change "_origins" to "_ascendants", "_roots", etc.
+ let dying_can_reach_origins =
+ iteration.variable::<((T::Origin, T::Point), T::Point)>("dying_can_reach_origins");
+
+ // .decl dying_can_reach(origin1, origin2, point1, point2)
+ //
+ // Indicates that `origin1`, which is dead
+ // in `point2`, can reach `origin2` in `point1`.
+ //
+ // This is effectively the transitive subset
+ // relation, but we try to limit it to origins
+ // that are dying on the edge `point1 -> point2`.
+ let dying_can_reach_o2q =
+ iteration.variable::<((T::Origin, T::Point), (T::Origin, T::Point))>("dying_can_reach");
+ let dying_can_reach_1 = iteration.variable_indistinct("dying_can_reach_1");
+
+ // .decl dying_can_reach_live(origin1, origin2, point1, point2)
+ //
+ // Indicates that, along the edge `point1 -> point2`, the dead (in `point2`)
+ // `origin1` can reach the live (in `point2`) `origin2` via a subset
+ // relation. This is a subset of the full `dying_can_reach`
+ // relation where we filter down to those cases where `origin2` is
+ // live in `point2`.
+ let dying_can_reach_live = iteration
+ .variable::<((T::Origin, T::Point, T::Point), T::Origin)>("dying_can_reach_live");
+
+ // .decl dead_borrow_region_can_reach_root((origin, point), loan)
+ //
+ // Indicates a "borrow region" `origin` at `point` which is not live on
+ // entry to `point`.
+ let dead_borrow_region_can_reach_root = iteration
+ .variable::<((T::Origin, T::Point), T::Loan)>("dead_borrow_region_can_reach_root");
+
+ // .decl dead_borrow_region_can_reach_dead((origin2, point), loan)
+ let dead_borrow_region_can_reach_dead = iteration
+ .variable::<((T::Origin, T::Point), T::Loan)>("dead_borrow_region_can_reach_dead");
+ let dead_borrow_region_can_reach_dead_1 =
+ iteration.variable_indistinct("dead_borrow_region_can_reach_dead_1");
+
+ // .decl errors(loan, point)
+ let errors = iteration.variable("errors");
+ let subset_errors = iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset_errors");
+
+ let subset_placeholder =
+ iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset_placeholder");
+ let subset_placeholder_o2p = iteration.variable_indistinct("subset_placeholder_o2p");
+
+ // Make "variable" versions of the relations, needed for joins.
+ loan_issued_at_op.extend(
+ ctx.loan_issued_at
+ .iter()
+ .map(|&(origin, loan, point)| ((origin, point), loan)),
+ );
+ loan_invalidated_at.extend(
+ ctx.loan_invalidated_at
+ .iter()
+ .map(|&(loan, point)| ((loan, point), ())),
+ );
+ origin_live_on_entry_var.extend(
+ origin_live_on_entry_rel
+ .iter()
+ .map(|&(origin, point)| ((origin, point), ())),
+ );
+
+ // subset(origin1, origin2, point) :-
+ // subset_base(origin1, origin2, point).
+ subset_o1p.extend(
+ ctx.subset_base
+ .iter()
+ .map(|&(origin1, origin2, point)| ((origin1, point), origin2)),
+ );
+
+ // origin_contains_loan_on_entry(origin, loan, point) :-
+ // loan_issued_at(origin, loan, point).
+ origin_contains_loan_on_entry_op.extend(
+ ctx.loan_issued_at
+ .iter()
+ .map(|&(origin, loan, point)| ((origin, point), loan)),
+ );
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // Cleanup step: remove symmetries
+ // - remove origins which are `subset`s of themselves
+ //
+ // FIXME: investigate whether there is a better way to do that without complicating
+ // the rules too much, because it would also require temporary variables and
+ // impact performance. Until then, the big reduction in tuples improves performance
+ // a lot, even if we're potentially adding a small number of tuples
+ // per round just to remove them in the next round.
+ subset_o1p
+ .recent
+ .borrow_mut()
+ .elements
+ .retain(|&((origin1, _), origin2)| origin1 != origin2);
+
+ subset_placeholder
+ .recent
+ .borrow_mut()
+ .elements
+ .retain(|&(origin1, origin2, _)| origin1 != origin2);
+ subset_placeholder_o2p.from_map(&subset_placeholder, |&(origin1, origin2, point)| {
+ ((origin2, point), origin1)
+ });
+
+ // live_to_dying_regions(origin1, origin2, point1, point2) :-
+ // subset(origin1, origin2, point1),
+ // cfg_edge(point1, point2),
+ // origin_live_on_entry(origin1, point2),
+ // !origin_live_on_entry(origin2, point2).
+ live_to_dying_regions_o2pq.from_leapjoin(
+ &subset_o1p,
+ (
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_with(|&((origin1, _), _)| origin1),
+ origin_live_on_entry_rel.extend_anti(|&((_, _), origin2)| origin2),
+ ),
+ |&((origin1, point1), origin2), &point2| ((origin2, point1, point2), origin1),
+ );
+
+ // dying_region_requires((origin, point1, point2), loan) :-
+ // origin_contains_loan_on_entry(origin, loan, point1),
+ // !loan_killed_at(loan, point1),
+ // cfg_edge(point1, point2),
+ // !origin_live_on_entry(origin, point2).
+ dying_region_requires.from_leapjoin(
+ &origin_contains_loan_on_entry_op,
+ (
+ loan_killed_at.filter_anti(|&((_, point1), loan)| (loan, point1)),
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_anti(|&((origin, _), _)| origin),
+ ),
+ |&((origin, point1), loan), &point2| ((origin, point1, point2), loan),
+ );
+
+ // dying_can_reach_origins(origin2, point1, point2) :-
+ // live_to_dying_regions(_, origin2, point1, point2).
+ dying_can_reach_origins.from_map(
+ &live_to_dying_regions_o2pq,
+ |&((origin2, point1, point2), _origin1)| ((origin2, point1), point2),
+ );
+
+ // dying_can_reach_origins(origin, point1, point2) :-
+ // dying_region_requires(origin, point1, point2, _loan).
+ dying_can_reach_origins.from_map(
+ &dying_region_requires,
+ |&((origin, point1, point2), _loan)| ((origin, point1), point2),
+ );
+
+ // dying_can_reach(origin1, origin2, point1, point2) :-
+ // dying_can_reach_origins(origin1, point1, point2),
+ // subset(origin1, origin2, point1).
+ dying_can_reach_o2q.from_join(
+ &dying_can_reach_origins,
+ &subset_o1p,
+ |&(origin1, point1), &point2, &origin2| ((origin2, point2), (origin1, point1)),
+ );
+
+ // dying_can_reach(origin1, origin3, point1, point2) :-
+ // dying_can_reach(origin1, origin2, point1, point2),
+ // !origin_live_on_entry(origin2, point2),
+ // subset(origin2, origin3, point1).
+ //
+ // This is the "transitive closure" rule, but
+ // note that we only apply it when the
+ // "intermediate" `origin2` is dead at `point2`.
+ dying_can_reach_1.from_antijoin(
+ &dying_can_reach_o2q,
+ &origin_live_on_entry_rel,
+ |&(origin2, point2), &(origin1, point1)| ((origin2, point1), (origin1, point2)),
+ );
+ dying_can_reach_o2q.from_join(
+ &dying_can_reach_1,
+ &subset_o1p,
+ |&(_origin2, point1), &(origin1, point2), &origin3| {
+ ((origin3, point2), (origin1, point1))
+ },
+ );
+
+ // dying_can_reach_live(origin1, origin2, point1, point2) :-
+ // dying_can_reach(origin1, origin2, point1, point2),
+ // origin_live_on_entry(origin2, point2).
+ dying_can_reach_live.from_join(
+ &dying_can_reach_o2q,
+ &origin_live_on_entry_var,
+ |&(origin2, point2), &(origin1, point1), _| ((origin1, point1, point2), origin2),
+ );
+
+ // subset(origin1, origin2, point2) :-
+ // subset(origin1, origin2, point1),
+ // cfg_edge(point1, point2),
+ // origin_live_on_entry(origin1, point2),
+ // origin_live_on_entry(origin2, point2).
+ //
+ // Carry `origin1 <= origin2` from `point1` into `point2` if both `origin1` and
+ // `origin2` are live in `point2`.
+ subset_o1p.from_leapjoin(
+ &subset_o1p,
+ (
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_with(|&((origin1, _), _)| origin1),
+ origin_live_on_entry_rel.extend_with(|&((_, _), origin2)| origin2),
+ ),
+ |&((origin1, _point1), origin2), &point2| ((origin1, point2), origin2),
+ );
+
+ // subset(origin1, origin3, point2) :-
+ // live_to_dying_regions(origin1, origin2, point1, point2),
+ // dying_can_reach_live(origin2, origin3, point1, point2).
+ subset_o1p.from_join(
+ &live_to_dying_regions_o2pq,
+ &dying_can_reach_live,
+ |&(_origin2, _point1, point2), &origin1, &origin3| ((origin1, point2), origin3),
+ );
+
+ // origin_contains_loan_on_entry(origin2, loan, point2) :-
+ // dying_region_requires(origin1, loan, point1, point2),
+ // dying_can_reach_live(origin1, origin2, point1, point2).
+ //
+ // Communicate a `origin1 contains loan` relation across
+ // an edge `point1 -> point2` where `origin1` is dead in `point2`; in
+ // that case, for each origin `origin2` live in `point2`
+ // where `origin1 <= origin2` in `point1`, we add `origin2 contains loan`
+ // to `point2`.
+ origin_contains_loan_on_entry_op.from_join(
+ &dying_region_requires,
+ &dying_can_reach_live,
+ |&(_origin1, _point1, point2), &loan, &origin2| ((origin2, point2), loan),
+ );
+
+ // origin_contains_loan_on_entry(origin, loan, point2) :-
+ // origin_contains_loan_on_entry(origin, loan, point1),
+ // !loan_killed_at(loan, point1),
+ // cfg_edge(point1, point2),
+ // origin_live_on_entry(origin, point2).
+ origin_contains_loan_on_entry_op.from_leapjoin(
+ &origin_contains_loan_on_entry_op,
+ (
+ loan_killed_at.filter_anti(|&((_, point1), loan)| (loan, point1)),
+ cfg_edge_rel.extend_with(|&((_, point1), _)| point1),
+ origin_live_on_entry_rel.extend_with(|&((origin, _), _)| origin),
+ ),
+ |&((origin, _), loan), &point2| ((origin, point2), loan),
+ );
+
+ // dead_borrow_region_can_reach_root((origin, point), loan) :-
+ // loan_issued_at(origin, loan, point),
+ // !origin_live_on_entry(origin, point).
+ dead_borrow_region_can_reach_root.from_antijoin(
+ &loan_issued_at_op,
+ &origin_live_on_entry_rel,
+ |&(origin, point), &loan| ((origin, point), loan),
+ );
+
+ // dead_borrow_region_can_reach_dead((origin, point), loan) :-
+ // dead_borrow_region_can_reach_root((origin, point), loan).
+ dead_borrow_region_can_reach_dead
+ .from_map(&dead_borrow_region_can_reach_root, |&tuple| tuple);
+
+ // dead_borrow_region_can_reach_dead((origin2, point), loan) :-
+ // dead_borrow_region_can_reach_dead(origin1, loan, point),
+ // subset(origin1, origin2, point),
+ // !origin_live_on_entry(origin2, point).
+ dead_borrow_region_can_reach_dead_1.from_join(
+ &dead_borrow_region_can_reach_dead,
+ &subset_o1p,
+ |&(_origin1, point), &loan, &origin2| ((origin2, point), loan),
+ );
+ dead_borrow_region_can_reach_dead.from_antijoin(
+ &dead_borrow_region_can_reach_dead_1,
+ &origin_live_on_entry_rel,
+ |&(origin2, point), &loan| ((origin2, point), loan),
+ );
+
+ // loan_live_at(loan, point) :-
+ // origin_contains_loan_on_entry(origin, loan, point),
+ // origin_live_on_entry(origin, point).
+ loan_live_at.from_join(
+ &origin_contains_loan_on_entry_op,
+ &origin_live_on_entry_var,
+ |&(_origin, point), &loan, _| ((loan, point), ()),
+ );
+
+ // loan_live_at(loan, point) :-
+ // dead_borrow_region_can_reach_dead(origin1, loan, point),
+ // subset(origin1, origin2, point),
+ // origin_live_on_entry(origin2, point).
+ //
+ // NB: the datafrog code below uses
+ // `dead_borrow_region_can_reach_dead_1`, which is equal
+ // to `dead_borrow_region_can_reach_dead` and `subset`
+ // joined together.
+ loan_live_at.from_join(
+ &dead_borrow_region_can_reach_dead_1,
+ &origin_live_on_entry_var,
+ |&(_origin2, point), &loan, _| ((loan, point), ()),
+ );
+
+ // errors(loan, point) :-
+ // loan_invalidated_at(loan, point),
+ // loan_live_at(loan, point).
+ errors.from_join(
+ &loan_invalidated_at,
+ &loan_live_at,
+ |&(loan, point), _, _| (loan, point),
+ );
+
+ // subset_placeholder(Origin1, Origin2, Point) :-
+ // subset(Origin1, Origin2, Point),
+ // placeholder_origin(Origin1).
+ subset_placeholder.from_leapjoin(
+ &subset_o1p,
+ (
+ placeholder_origin.extend_with(|&((origin1, _point), _origin2)| origin1),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&((origin1, _point), origin2), _| {
+ origin1 != origin2
+ }),
+ ),
+ |&((origin1, point), origin2), _| (origin1, origin2, point),
+ );
+
+ // We compute the transitive closure of the placeholder origins, so we
+ // maintain the invariant from the rule above that `Origin1` is a placeholder origin.
+ //
+ // subset_placeholder(Origin1, Origin3, Point) :-
+ // subset_placeholder(Origin1, Origin2, Point),
+ // subset(Origin2, Origin3, Point).
+ subset_placeholder.from_join(
+ &subset_placeholder_o2p,
+ &subset_o1p,
+ |&(_origin2, point), &origin1, &origin3| (origin1, origin3, point),
+ );
+
+ // subset_error(Origin1, Origin2, Point) :-
+ // subset_placeholder(Origin1, Origin2, Point),
+ // placeholder_origin(Origin2),
+ // !known_placeholder_subset(Origin1, Origin2).
+ subset_errors.from_leapjoin(
+ &subset_placeholder,
+ (
+ placeholder_origin.extend_with(|&(_origin1, origin2, _point)| origin2),
+ known_placeholder_subset
+ .filter_anti(|&(origin1, origin2, _point)| (origin1, origin2)),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&(origin1, origin2, _point), _| {
+ origin1 != origin2
+ }),
+ ),
+ |&(origin1, origin2, point), _| (origin1, origin2, point),
+ );
+ }
+
+ if result.dump_enabled {
+ let subset_o1p = subset_o1p.complete();
+ assert!(
+ subset_o1p
+ .iter()
+ .filter(|&((origin1, _), origin2)| origin1 == origin2)
+ .count()
+ == 0,
+ "unwanted subset symmetries"
+ );
+ for &((origin1, location), origin2) in subset_o1p.iter() {
+ result
+ .subset
+ .entry(location)
+ .or_default()
+ .entry(origin1)
+ .or_default()
+ .insert(origin2);
+ }
+
+ let origin_contains_loan_on_entry_op = origin_contains_loan_on_entry_op.complete();
+ for &((origin, location), loan) in origin_contains_loan_on_entry_op.iter() {
+ result
+ .origin_contains_loan_at
+ .entry(location)
+ .or_default()
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+
+ let loan_live_at = loan_live_at.complete();
+ for &((loan, location), _) in loan_live_at.iter() {
+ result.loan_live_at.entry(location).or_default().push(loan);
+ }
+ }
+
+ (errors.complete(), subset_errors.complete())
+ };
+
+ info!(
+ "analysis done: {} `errors` tuples, {} `subset_errors` tuples, {:?}",
+ errors.len(),
+ subset_errors.len(),
+ timer.elapsed()
+ );
+
+ (errors, subset_errors)
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/initialization.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/initialization.rs
new file mode 100644
index 0000000..30409d9
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/initialization.rs
@@ -0,0 +1,284 @@
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{InitializationContext, Output};
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+
+// This represents the output of an intermediate elaboration step (step 1).
+struct TransitivePaths<T: FactTypes> {
+ path_moved_at: Relation<(T::Path, T::Point)>,
+ path_assigned_at: Relation<(T::Path, T::Point)>,
+ path_accessed_at: Relation<(T::Path, T::Point)>,
+ path_begins_with_var: Relation<(T::Path, T::Variable)>,
+}
+
+struct InitializationStatus<T: FactTypes> {
+ var_maybe_partly_initialized_on_exit: Relation<(T::Variable, T::Point)>,
+ move_error: Relation<(T::Path, T::Point)>,
+}
+
+pub(super) struct InitializationResult<T: FactTypes>(
+ pub(super) Relation<(T::Variable, T::Point)>,
+ pub(super) Relation<(T::Path, T::Point)>,
+);
+
+// Step 1: compute transitive closures of path operations. This would elaborate,
+// for example, an access to x into an access to x.f, x.f.0, etc. We do this for:
+// - access to a path
+// - initialization of a path
+// - moves of a path
+// FIXME: transitive rooting in a variable (path_begins_with_var)
+// Note that this step may not be entirely necessary!
+fn compute_transitive_paths<T: FactTypes>(
+ child_path: Vec<(T::Path, T::Path)>,
+ path_assigned_at_base: Vec<(T::Path, T::Point)>,
+ path_moved_at_base: Vec<(T::Path, T::Point)>,
+ path_accessed_at_base: Vec<(T::Path, T::Point)>,
+ path_is_var: Vec<(T::Path, T::Variable)>,
+) -> TransitivePaths<T> {
+ let mut iteration = Iteration::new();
+ let child_path: Relation<(T::Path, T::Path)> = child_path.into();
+
+ let ancestor_path = iteration.variable::<(T::Path, T::Path)>("ancestor");
+
+ // These are the actual targets:
+ let path_moved_at = iteration.variable::<(T::Path, T::Point)>("path_moved_at");
+ let path_assigned_at = iteration.variable::<(T::Path, T::Point)>("path_initialized_at");
+ let path_accessed_at = iteration.variable::<(T::Path, T::Point)>("path_accessed_at");
+ let path_begins_with_var = iteration.variable::<(T::Path, T::Variable)>("path_begins_with_var");
+
+ // ancestor_path(Parent, Child) :- child_path(Child, Parent).
+ ancestor_path.extend(child_path.iter().map(|&(child, parent)| (parent, child)));
+
+ // path_moved_at(Path, Point) :- path_moved_at_base(Path, Point).
+ path_moved_at.insert(path_moved_at_base.into());
+
+ // path_assigned_at(Path, Point) :- path_assigned_at_base(Path, Point).
+ path_assigned_at.insert(path_assigned_at_base.into());
+
+ // path_accessed_at(Path, Point) :- path_accessed_at_base(Path, Point).
+ path_accessed_at.insert(path_accessed_at_base.into());
+
+ // path_begins_with_var(Path, Var) :- path_is_var(Path, Var).
+ path_begins_with_var.insert(path_is_var.into());
+
+ while iteration.changed() {
+ // ancestor_path(Grandparent, Child) :-
+ // ancestor_path(Parent, Child),
+ // child_path(Parent, Grandparent).
+ ancestor_path.from_join(
+ &ancestor_path,
+ &child_path,
+ |&_parent, &child, &grandparent| (grandparent, child),
+ );
+
+ // moving a path moves its children
+ // path_moved_at(Child, Point) :-
+ // path_moved_at(Parent, Point),
+ // ancestor_path(Parent, Child).
+ path_moved_at.from_join(&path_moved_at, &ancestor_path, |&_parent, &p, &child| {
+ (child, p)
+ });
+
+ // initialising x at p initialises all of x's children
+ // path_assigned_at(Child, point) :-
+ // path_assigned_at(Parent, point),
+ // ancestor_path(Parent, Child).
+ path_assigned_at.from_join(&path_assigned_at, &ancestor_path, |&_parent, &p, &child| {
+ (child, p)
+ });
+
+ // accessing x at p accesses all of x's children at p (actually,
+ // accesses should be maximally precise and this shouldn't happen?)
+ // path_accessed_at(Child, point) :-
+ // path_accessed_at(Parent, point),
+ // ancestor_path(Parent, Child).
+ path_accessed_at.from_join(&path_accessed_at, &ancestor_path, |&_parent, &p, &child| {
+ (child, p)
+ });
+
+ // path_begins_with_var(Child, Var) :-
+ // path_begins_with_var(Parent, Var)
+ // ancestor_path(Parent, Child).
+ path_begins_with_var.from_join(
+ &path_begins_with_var,
+ &ancestor_path,
+ |&_parent, &var, &child| (child, var),
+ );
+ }
+
+ TransitivePaths {
+ path_assigned_at: path_assigned_at.complete(),
+ path_moved_at: path_moved_at.complete(),
+ path_accessed_at: path_accessed_at.complete(),
+ path_begins_with_var: path_begins_with_var.complete(),
+ }
+}
+
+// Step 2: Compute path initialization and deinitialization across the CFG.
+fn compute_move_errors<T: FactTypes>(
+ ctx: TransitivePaths<T>,
+ cfg_edge: &Relation<(T::Point, T::Point)>,
+ output: &mut Output<T>,
+) -> InitializationStatus<T> {
+ let mut iteration = Iteration::new();
+ // Variables
+
+ // var_maybe_partly_initialized_on_exit(var, point): Upon leaving `point`,
+ // `var` is partially initialized for some path through the CFG, that is
+ // there has been an initialization of var, and var has not been moved in
+ // all paths through the CFG.
+ let var_maybe_partly_initialized_on_exit =
+ iteration.variable::<(T::Variable, T::Point)>("var_maybe_partly_initialized_on_exit");
+
+ // path_maybe_initialized_on_exit(path, point): Upon leaving `point`, the
+ // move path `path` is initialized for some path through the CFG.
+ let path_maybe_initialized_on_exit =
+ iteration.variable::<(T::Path, T::Point)>("path_maybe_initialized_on_exit");
+
+ // path_maybe_uninitialized_on_exit(Path, Point): There exists at least one
+ // path through the CFG to Point such that `Path` has been moved out by the
+ // time we arrive at `Point` without it being re-initialized for sure.
+ let path_maybe_uninitialized_on_exit =
+ iteration.variable::<(T::Path, T::Point)>("path_maybe_uninitialized_on_exit");
+
+ // move_error(Path, Point): There is an access to `Path` at `Point`, but
+ // `Path` is potentially moved (or never initialised).
+ let move_error = iteration.variable::<(T::Path, T::Point)>("move_error");
+
+ // Initial propagation of static relations
+
+ // path_maybe_initialized_on_exit(path, point) :- path_assigned_at(path, point).
+ path_maybe_initialized_on_exit.insert(ctx.path_assigned_at.clone());
+
+ // path_maybe_uninitialized_on_exit(path, point) :- path_moved_at(path, point).
+ path_maybe_uninitialized_on_exit.insert(ctx.path_moved_at.clone());
+
+ while iteration.changed() {
+ // path_maybe_initialized_on_exit(path, point2) :-
+ // path_maybe_initialized_on_exit(path, point1),
+ // cfg_edge(point1, point2),
+ // !path_moved_at(path, point2).
+ path_maybe_initialized_on_exit.from_leapjoin(
+ &path_maybe_initialized_on_exit,
+ (
+ cfg_edge.extend_with(|&(_path, point1)| point1),
+ ctx.path_moved_at.extend_anti(|&(path, _point1)| path),
+ ),
+ |&(path, _point1), &point2| (path, point2),
+ );
+
+ // path_maybe_uninitialized_on_exit(path, point2) :-
+ // path_maybe_uninitialized_on_exit(path, point1),
+ // cfg_edge(point1, point2),
+ // !path_assigned_at(path, point2).
+ path_maybe_uninitialized_on_exit.from_leapjoin(
+ &path_maybe_uninitialized_on_exit,
+ (
+ cfg_edge.extend_with(|&(_path, point1)| point1),
+ ctx.path_assigned_at.extend_anti(|&(path, _point1)| path),
+ ),
+ |&(path, _point1), &point2| (path, point2),
+ );
+
+ // var_maybe_partly_initialized_on_exit(var, point) :-
+ // path_maybe_initialized_on_exit(path, point),
+ // path_begins_with_var(path, var).
+ var_maybe_partly_initialized_on_exit.from_leapjoin(
+ &path_maybe_initialized_on_exit,
+ ctx.path_begins_with_var.extend_with(|&(path, _point)| path),
+ |&(_path, point), &var| (var, point),
+ );
+
+ // move_error(Path, TargetNode) :-
+ // path_maybe_uninitialized_on_exit(Path, SourceNode),
+ // cfg_edge(SourceNode, TargetNode),
+ // path_accessed_at(Path, TargetNode).
+ move_error.from_leapjoin(
+ &path_maybe_uninitialized_on_exit,
+ (
+ cfg_edge.extend_with(|&(_path, source_node)| source_node),
+ ctx.path_accessed_at
+ .extend_with(|&(path, _source_node)| path),
+ ),
+ |&(path, _source_node), &target_node| (path, target_node),
+ );
+ }
+
+ if output.dump_enabled {
+ for &(path, location) in path_maybe_initialized_on_exit.complete().iter() {
+ output
+ .path_maybe_initialized_on_exit
+ .entry(location)
+ .or_default()
+ .push(path);
+ }
+
+ for &(path, location) in path_maybe_uninitialized_on_exit.complete().iter() {
+ output
+ .path_maybe_uninitialized_on_exit
+ .entry(location)
+ .or_default()
+ .push(path);
+ }
+ }
+
+ InitializationStatus {
+ var_maybe_partly_initialized_on_exit: var_maybe_partly_initialized_on_exit.complete(),
+ move_error: move_error.complete(),
+ }
+}
+
+// Compute two things:
+//
+// - an over-approximation of the initialization of variables. This is used in
+// the origin_live_on_entry computations to determine when a drop may happen; a
+// definitely moved variable would not be actually dropped.
+// - move errors.
+//
+// The process is split into two stages:
+//
+// 1. Compute the transitive closure of path accesses. That is, accessing `f.a`
+// would access `f.a.b`, etc.
+// 2. Use this to compute both paths that may be initialized and paths that may
+// have been deinitialized, which in turn can be used to find move errors (an
+// access to a path that may be deinitialized).
+pub(super) fn compute<T: FactTypes>(
+ ctx: InitializationContext<T>,
+ cfg_edge: &Relation<(T::Point, T::Point)>,
+ output: &mut Output<T>,
+) -> InitializationResult<T> {
+ let timer = Instant::now();
+
+ let transitive_paths = compute_transitive_paths::<T>(
+ ctx.child_path,
+ ctx.path_assigned_at_base,
+ ctx.path_moved_at_base,
+ ctx.path_accessed_at_base,
+ ctx.path_is_var,
+ );
+ info!("initialization phase 1 completed: {:?}", timer.elapsed());
+
+ let InitializationStatus {
+ var_maybe_partly_initialized_on_exit,
+ move_error,
+ } = compute_move_errors::<T>(transitive_paths, cfg_edge, output);
+ info!(
+ "initialization phase 2: {} move errors in {:?}",
+ move_error.elements.len(),
+ timer.elapsed()
+ );
+
+ if output.dump_enabled {
+ for &(var, location) in var_maybe_partly_initialized_on_exit.iter() {
+ output
+ .var_maybe_partly_initialized_on_exit
+ .entry(location)
+ .or_default()
+ .push(var);
+ }
+ }
+
+ InitializationResult(var_maybe_partly_initialized_on_exit, move_error)
+}
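
The step-1 elaboration in this file is a plain transitive closure driven by datafrog's `from_join`. A stand-alone sketch of that pattern, using the vendored `datafrog` crate with made-up path indices:

    use datafrog::{Iteration, Relation};

    fn main() {
        // child_path(Child, Parent): path 1 (say `x.y`) is a direct child of path 0 (`x`),
        // and path 2 (`x.y.z`) is a direct child of path 1.
        let child_path: Relation<(u32, u32)> = vec![(1, 0), (2, 1)].into();

        let mut iteration = Iteration::new();
        let ancestor_path = iteration.variable::<(u32, u32)>("ancestor_path");

        // ancestor_path(Parent, Child) :- child_path(Child, Parent).
        ancestor_path.extend(child_path.iter().map(|&(child, parent)| (parent, child)));

        while iteration.changed() {
            // ancestor_path(Grandparent, Child) :-
            //     ancestor_path(Parent, Child),
            //     child_path(Parent, Grandparent).
            ancestor_path.from_join(&ancestor_path, &child_path, |&_parent, &child, &grandparent| {
                (grandparent, child)
            });
        }

        // (0, 1), (1, 2) and the derived (0, 2): `x` is an ancestor of `x.y.z`.
        assert_eq!(ancestor_path.complete().len(), 3);
    }
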
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/liveness.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/liveness.rs
new file mode 100644
index 0000000..1b4b4ce
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/liveness.rs
@@ -0,0 +1,170 @@
+// Copyright 2019 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! An implementation of the origin liveness calculation logic
+
+use std::collections::BTreeSet;
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{LivenessContext, Output};
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+
+pub(super) fn compute_live_origins<T: FactTypes>(
+ ctx: LivenessContext<T>,
+ cfg_edge: &Relation<(T::Point, T::Point)>,
+ var_maybe_partly_initialized_on_exit: Relation<(T::Variable, T::Point)>,
+ output: &mut Output<T>,
+) -> Vec<(T::Origin, T::Point)> {
+ let timer = Instant::now();
+ let mut iteration = Iteration::new();
+
+ // Relations
+ let var_defined_at: Relation<(T::Variable, T::Point)> = ctx.var_defined_at.into();
+ let cfg_edge_reverse: Relation<(T::Point, T::Point)> = cfg_edge
+ .iter()
+ .map(|&(point1, point2)| (point2, point1))
+ .collect();
+ let use_of_var_derefs_origin: Relation<(T::Variable, T::Origin)> =
+ ctx.use_of_var_derefs_origin.into();
+ let drop_of_var_derefs_origin: Relation<(T::Variable, T::Origin)> =
+ ctx.drop_of_var_derefs_origin.into();
+ let var_dropped_at: Relation<((T::Variable, T::Point), ())> = ctx
+ .var_dropped_at
+ .into_iter()
+ .map(|(var, point)| ((var, point), ()))
+ .collect();
+
+ // Variables
+
+ // `var_live_on_entry`: variable `var` is live upon entry at `point`
+ let var_live_on_entry = iteration.variable::<(T::Variable, T::Point)>("var_live_on_entry");
+ // `var_drop_live_on_entry`: variable `var` is drop-live (will be used for a drop) upon entry in `point`
+ let var_drop_live_on_entry =
+ iteration.variable::<(T::Variable, T::Point)>("var_drop_live_on_entry");
+
+ // This is what we are actually calculating:
+ let origin_live_on_entry = iteration.variable::<(T::Origin, T::Point)>("origin_live_on_entry");
+
+ // This propagates the relation `var_live_on_entry(var, point) :- var_used_at(var, point)`:
+ var_live_on_entry.insert(ctx.var_used_at.into());
+
+ // var_maybe_partly_initialized_on_entry(var, point2) :-
+ // var_maybe_partly_initialized_on_exit(var, point1),
+ // cfg_edge(point1, point2).
+ let var_maybe_partly_initialized_on_entry = Relation::from_leapjoin(
+ &var_maybe_partly_initialized_on_exit,
+ cfg_edge.extend_with(|&(_var, point1)| point1),
+ |&(var, _point1), &point2| ((var, point2), ()),
+ );
+
+ // var_drop_live_on_entry(var, point) :-
+ // var_dropped_at(var, point),
+ // var_maybe_partly_initialized_on_entry(var, point).
+ var_drop_live_on_entry.insert(Relation::from_join(
+ &var_dropped_at,
+ &var_maybe_partly_initialized_on_entry,
+ |&(var, point), _, _| (var, point),
+ ));
+
+ while iteration.changed() {
+ // origin_live_on_entry(origin, point) :-
+ // var_drop_live_on_entry(var, point),
+ // drop_of_var_derefs_origin(var, origin).
+ origin_live_on_entry.from_join(
+ &var_drop_live_on_entry,
+ &drop_of_var_derefs_origin,
+ |_var, &point, &origin| (origin, point),
+ );
+
+ // origin_live_on_entry(origin, point) :-
+ // var_live_on_entry(var, point),
+ // use_of_var_derefs_origin(var, origin).
+ origin_live_on_entry.from_join(
+ &var_live_on_entry,
+ &use_of_var_derefs_origin,
+ |_var, &point, &origin| (origin, point),
+ );
+
+ // var_live_on_entry(var, point1) :-
+ // var_live_on_entry(var, point2),
+ // cfg_edge(point1, point2),
+ // !var_defined(var, point1).
+ var_live_on_entry.from_leapjoin(
+ &var_live_on_entry,
+ (
+ var_defined_at.extend_anti(|&(var, _point2)| var),
+ cfg_edge_reverse.extend_with(|&(_var, point2)| point2),
+ ),
+ |&(var, _point2), &point1| (var, point1),
+ );
+
+ // var_drop_live_on_entry(Var, SourceNode) :-
+ // var_drop_live_on_entry(Var, TargetNode),
+ // cfg_edge(SourceNode, TargetNode),
+ // !var_defined_at(Var, SourceNode),
+ // var_maybe_partly_initialized_on_exit(Var, SourceNode).
+ var_drop_live_on_entry.from_leapjoin(
+ &var_drop_live_on_entry,
+ (
+ var_defined_at.extend_anti(|&(var, _target_node)| var),
+ cfg_edge_reverse.extend_with(|&(_var, target_node)| target_node),
+ var_maybe_partly_initialized_on_exit.extend_with(|&(var, _target_node)| var),
+ ),
+ |&(var, _targetnode), &source_node| (var, source_node),
+ );
+ }
+
+ let origin_live_on_entry = origin_live_on_entry.complete();
+
+ info!(
+ "compute_live_origins() completed: {} tuples, {:?}",
+ origin_live_on_entry.len(),
+ timer.elapsed(),
+ );
+
+ if output.dump_enabled {
+ let var_drop_live_on_entry = var_drop_live_on_entry.complete();
+ for &(var, location) in var_drop_live_on_entry.iter() {
+ output
+ .var_drop_live_on_entry
+ .entry(location)
+ .or_default()
+ .push(var);
+ }
+
+ let var_live_on_entry = var_live_on_entry.complete();
+ for &(var, location) in var_live_on_entry.iter() {
+ output
+ .var_live_on_entry
+ .entry(location)
+ .or_default()
+ .push(var);
+ }
+ }
+
+ origin_live_on_entry.elements
+}
+
+pub(super) fn make_universal_regions_live<T: FactTypes>(
+ origin_live_on_entry: &mut Vec<(T::Origin, T::Point)>,
+ cfg_node: &BTreeSet<T::Point>,
+ universal_regions: &[T::Origin],
+) {
+ debug!("make_universal_regions_live()");
+
+ origin_live_on_entry.reserve(universal_regions.len() * cfg_node.len());
+ for &origin in universal_regions.iter() {
+ for &point in cfg_node.iter() {
+ origin_live_on_entry.push((origin, point));
+ }
+ }
+}
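
Most rules here (and in datafrog_opt.rs) are expressed as leapjoins: one dynamic variable is extended and filtered by several static relations in a single step. A reduced, stand-alone sketch of the backwards-liveness rule above, with made-up point and variable indices:

    use datafrog::{Iteration, Relation, RelationLeaper};

    fn main() {
        // cfg_edge(0, 1) and cfg_edge(1, 2), stored reversed so we can walk predecessors.
        let cfg_edge_reverse: Relation<(u32, u32)> = vec![(1, 0), (2, 1)].into();
        // var_defined_at(var 7, point 0)
        let var_defined_at: Relation<(u32, u32)> = vec![(7, 0)].into();

        let mut iteration = Iteration::new();
        let var_live_on_entry = iteration.variable::<(u32, u32)>("var_live_on_entry");
        // var_used_at(var 7, point 2) seeds liveness.
        var_live_on_entry.extend(vec![(7, 2)]);

        while iteration.changed() {
            // var_live_on_entry(var, point1) :-
            //     var_live_on_entry(var, point2),
            //     cfg_edge(point1, point2),
            //     !var_defined_at(var, point1).
            var_live_on_entry.from_leapjoin(
                &var_live_on_entry,
                (
                    var_defined_at.extend_anti(|&(var, _point2)| var),
                    cfg_edge_reverse.extend_with(|&(_var, point2)| point2),
                ),
                |&(var, _point2), &point1| (var, point1),
            );
        }

        // Liveness flows backwards from the use at point 2 to point 1, but not past
        // point 0, where variable 7 is (re)defined.
        assert_eq!(var_live_on_entry.complete().len(), 2);
    }
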
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/location_insensitive.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/location_insensitive.rs
new file mode 100644
index 0000000..83ce277
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/location_insensitive.rs
@@ -0,0 +1,156 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{Context, Output};
+
+pub(super) fn compute<T: FactTypes>(
+ ctx: &Context<'_, T>,
+ result: &mut Output<T>,
+) -> (
+ Relation<(T::Loan, T::Point)>,
+ Relation<(T::Origin, T::Origin)>,
+) {
+ let timer = Instant::now();
+
+ let (potential_errors, potential_subset_errors) = {
+ // Static inputs
+ let origin_live_on_entry = &ctx.origin_live_on_entry;
+ let loan_invalidated_at = &ctx.loan_invalidated_at;
+ let placeholder_origin = &ctx.placeholder_origin;
+ let placeholder_loan = &ctx.placeholder_loan;
+ let known_contains = &ctx.known_contains;
+
+ // subset(Origin1, Origin2) :-
+ // subset_base(Origin1, Origin2, _).
+ let subset = Relation::from_iter(
+ ctx.subset_base
+ .iter()
+ .map(|&(origin1, origin2, _point)| (origin1, origin2)),
+ );
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // .. some variables, ..
+ let origin_contains_loan_on_entry =
+ iteration.variable::<(T::Origin, T::Loan)>("origin_contains_loan_on_entry");
+
+ let potential_errors = iteration.variable::<(T::Loan, T::Point)>("potential_errors");
+ let potential_subset_errors =
+ iteration.variable::<(T::Origin, T::Origin)>("potential_subset_errors");
+
+ // load initial facts.
+
+ // origin_contains_loan_on_entry(Origin, Loan) :-
+ // loan_issued_at(Origin, Loan, _).
+ origin_contains_loan_on_entry.extend(
+ ctx.loan_issued_at
+ .iter()
+ .map(|&(origin, loan, _point)| (origin, loan)),
+ );
+
+ // origin_contains_loan_on_entry(Origin, Loan) :-
+ // placeholder_loan(Origin, Loan).
+ origin_contains_loan_on_entry.extend(
+ placeholder_loan
+ .iter()
+ .map(|&(loan, origin)| (origin, loan)),
+ );
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // origin_contains_loan_on_entry(Origin2, Loan) :-
+ // origin_contains_loan_on_entry(Origin1, Loan),
+ // subset(Origin1, Origin2).
+ //
+ // Note: Since `subset` is effectively a static input, this join can be ported to
+ // a leapjoin. Doing so, however, was 7% slower on `clap`.
+ origin_contains_loan_on_entry.from_join(
+ &origin_contains_loan_on_entry,
+ &subset,
+ |&_origin1, &loan, &origin2| (origin2, loan),
+ );
+
+ // loan_live_at(Loan, Point) :-
+ // origin_contains_loan_on_entry(Origin, Loan),
+ // origin_live_on_entry(Origin, Point)
+ //
+ // potential_errors(Loan, Point) :-
+ // loan_invalidated_at(Loan, Point),
+ // loan_live_at(Loan, Point).
+ //
+ // Note: we don't need to materialize `loan_live_at` here
+ // so we can inline it in the `potential_errors` relation.
+ //
+ potential_errors.from_leapjoin(
+ &origin_contains_loan_on_entry,
+ (
+ origin_live_on_entry.extend_with(|&(origin, _loan)| origin),
+ loan_invalidated_at.extend_with(|&(_origin, loan)| loan),
+ ),
+ |&(_origin, loan), &point| (loan, point),
+ );
+
+ // potential_subset_errors(Origin1, Origin2) :-
+ // placeholder(Origin1, Loan1),
+ // placeholder(Origin2, _),
+ // origin_contains_loan_on_entry(Origin2, Loan1),
+ // !known_contains(Origin2, Loan1).
+ potential_subset_errors.from_leapjoin(
+ &origin_contains_loan_on_entry,
+ (
+ known_contains.filter_anti(|&(origin2, loan1)| (origin2, loan1)),
+ placeholder_origin.filter_with(|&(origin2, _loan1)| (origin2, ())),
+ placeholder_loan.extend_with(|&(_origin2, loan1)| loan1),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&(origin2, _loan1), &origin1| origin2 != origin1),
+ ),
+ |&(origin2, _loan1), &origin1| (origin1, origin2),
+ );
+ }
+
+ if result.dump_enabled {
+ for &(origin1, origin2) in subset.iter() {
+ result
+ .subset_anywhere
+ .entry(origin1)
+ .or_default()
+ .insert(origin2);
+ }
+
+ let origin_contains_loan_on_entry = origin_contains_loan_on_entry.complete();
+ for &(origin, loan) in origin_contains_loan_on_entry.iter() {
+ result
+ .origin_contains_loan_anywhere
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+ }
+
+ (
+ potential_errors.complete(),
+ potential_subset_errors.complete(),
+ )
+ };
+
+ info!(
+ "analysis done: {} `potential_errors` tuples, {} `potential_subset_errors` tuples, {:?}",
+ potential_errors.len(),
+ potential_subset_errors.len(),
+ timer.elapsed()
+ );
+
+ (potential_errors, potential_subset_errors)
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/mod.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/mod.rs
new file mode 100644
index 0000000..b840e4b
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/mod.rs
@@ -0,0 +1,614 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use datafrog::Relation;
+use rustc_hash::{FxHashMap, FxHashSet};
+use std::borrow::Cow;
+use std::collections::{BTreeMap, BTreeSet};
+
+use crate::facts::{AllFacts, Atom, FactTypes};
+
+mod datafrog_opt;
+mod initialization;
+mod liveness;
+mod location_insensitive;
+mod naive;
+
+#[derive(Debug, Clone, Copy)]
+pub enum Algorithm {
+ /// Simple rules, but slower to execute
+ Naive,
+
+ /// Optimized variant of the rules
+ DatafrogOpt,
+
+ /// Fast to compute, but imprecise: there can be false-positives
+ /// but no false-negatives. Tailored for quick "early return" situations.
+ LocationInsensitive,
+
+ /// Compares the `Naive` and `DatafrogOpt` variants to ensure they indeed
+ /// compute the same errors.
+ Compare,
+
+ /// Combination of the fast `LocationInsensitive` pre-pass, followed by
+ /// the more expensive `DatafrogOpt` variant.
+ Hybrid,
+}
+
+impl Algorithm {
+ /// Optimized variants that ought to be equivalent to "naive"
+ pub const OPTIMIZED: &'static [Algorithm] = &[Algorithm::DatafrogOpt];
+
+ pub fn variants() -> [&'static str; 5] {
+ [
+ "Naive",
+ "DatafrogOpt",
+ "LocationInsensitive",
+ "Compare",
+ "Hybrid",
+ ]
+ }
+}
+
+impl ::std::str::FromStr for Algorithm {
+ type Err = String;
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s.to_lowercase().as_ref() {
+ "naive" => Ok(Algorithm::Naive),
+ "datafrogopt" => Ok(Algorithm::DatafrogOpt),
+ "locationinsensitive" => Ok(Algorithm::LocationInsensitive),
+ "compare" => Ok(Algorithm::Compare),
+ "hybrid" => Ok(Algorithm::Hybrid),
+ _ => Err(String::from(
+ "valid values: Naive, DatafrogOpt, LocationInsensitive, Compare, Hybrid",
+ )),
+ }
+ }
+}
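+
+// A minimal usage sketch of the parser above: `"hybrid".parse::<Algorithm>()`
+// yields `Ok(Algorithm::Hybrid)`, while an unrecognized name yields an `Err`
+// carrying the list of valid values.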
+
+#[derive(Clone, Debug)]
+pub struct Output<T: FactTypes> {
+ pub errors: FxHashMap<T::Point, Vec<T::Loan>>,
+ pub subset_errors: FxHashMap<T::Point, BTreeSet<(T::Origin, T::Origin)>>,
+ pub move_errors: FxHashMap<T::Point, Vec<T::Path>>,
+
+ pub dump_enabled: bool,
+
+ // these are just for debugging
+ pub loan_live_at: FxHashMap<T::Point, Vec<T::Loan>>,
+ pub origin_contains_loan_at: FxHashMap<T::Point, BTreeMap<T::Origin, BTreeSet<T::Loan>>>,
+ pub origin_contains_loan_anywhere: FxHashMap<T::Origin, BTreeSet<T::Loan>>,
+ pub origin_live_on_entry: FxHashMap<T::Point, Vec<T::Origin>>,
+ pub loan_invalidated_at: FxHashMap<T::Point, Vec<T::Loan>>,
+ pub subset: FxHashMap<T::Point, BTreeMap<T::Origin, BTreeSet<T::Origin>>>,
+ pub subset_anywhere: FxHashMap<T::Origin, BTreeSet<T::Origin>>,
+ pub var_live_on_entry: FxHashMap<T::Point, Vec<T::Variable>>,
+ pub var_drop_live_on_entry: FxHashMap<T::Point, Vec<T::Variable>>,
+ pub path_maybe_initialized_on_exit: FxHashMap<T::Point, Vec<T::Path>>,
+ pub path_maybe_uninitialized_on_exit: FxHashMap<T::Point, Vec<T::Path>>,
+ pub known_contains: FxHashMap<T::Origin, BTreeSet<T::Loan>>,
+ pub var_maybe_partly_initialized_on_exit: FxHashMap<T::Point, Vec<T::Variable>>,
+}
+
+/// Subset of `AllFacts` dedicated to initialization
+struct InitializationContext<T: FactTypes> {
+ child_path: Vec<(T::Path, T::Path)>,
+ path_is_var: Vec<(T::Path, T::Variable)>,
+ path_assigned_at_base: Vec<(T::Path, T::Point)>,
+ path_moved_at_base: Vec<(T::Path, T::Point)>,
+ path_accessed_at_base: Vec<(T::Path, T::Point)>,
+}
+
+/// Subset of `AllFacts` dedicated to liveness
+struct LivenessContext<T: FactTypes> {
+ var_used_at: Vec<(T::Variable, T::Point)>,
+ var_defined_at: Vec<(T::Variable, T::Point)>,
+ var_dropped_at: Vec<(T::Variable, T::Point)>,
+ use_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+ drop_of_var_derefs_origin: Vec<(T::Variable, T::Origin)>,
+}
+
+/// Subset of `AllFacts` dedicated to borrow checking, and data ready to use by the variants
+struct Context<'ctx, T: FactTypes> {
+ // `Relation`s used as static inputs, by all variants
+ origin_live_on_entry: Relation<(T::Origin, T::Point)>,
+ loan_invalidated_at: Relation<(T::Loan, T::Point)>,
+
+ // static inputs used via `Variable`s, by all variants
+ subset_base: &'ctx Vec<(T::Origin, T::Origin, T::Point)>,
+ loan_issued_at: &'ctx Vec<(T::Origin, T::Loan, T::Point)>,
+
+ // static inputs used by variants other than `LocationInsensitive`
+ loan_killed_at: Relation<(T::Loan, T::Point)>,
+ known_contains: Relation<(T::Origin, T::Loan)>,
+ placeholder_origin: Relation<(T::Origin, ())>,
+ placeholder_loan: Relation<(T::Loan, T::Origin)>,
+
+ // The `known_placeholder_subset` relation in the facts does not necessarily contain all the
+ // transitive subsets. The transitive closure is always needed, so this version here is fully
+ // closed over.
+ known_placeholder_subset: Relation<(T::Origin, T::Origin)>,
+
+ // while this static input is unused by `LocationInsensitive`, it's depended on by
+    // initialization and liveness, so it's already computed by the time we get to borrow checking.
+ cfg_edge: Relation<(T::Point, T::Point)>,
+
+    // Partial results possibly used by other variants as input. Not currently used.
+ #[allow(dead_code)]
+ potential_errors: Option<FxHashSet<T::Loan>>,
+ #[allow(dead_code)]
+ potential_subset_errors: Option<Relation<(T::Origin, T::Origin)>>,
+}
+
+impl<T: FactTypes> Output<T> {
+ /// All variants require the same initial preparations, done in multiple
+ /// successive steps:
+ /// - compute initialization data
+ /// - compute liveness
+ /// - prepare static inputs as shared `Relation`s
+    /// - in cases where the `LocationInsensitive` variant is run as a filtering pre-pass,
+ /// partial results can also be stored in the context, so that the following
+ /// variant can use it to prune its own input data
+ pub fn compute(all_facts: &AllFacts<T>, algorithm: Algorithm, dump_enabled: bool) -> Self {
+ let mut result = Output::new(dump_enabled);
+
+ // TODO: remove all the cloning thereafter, but that needs to be done in concert with rustc
+
+ let cfg_edge = all_facts.cfg_edge.clone().into();
+
+ // 1) Initialization
+ let initialization_ctx = InitializationContext {
+ child_path: all_facts.child_path.clone(),
+ path_is_var: all_facts.path_is_var.clone(),
+ path_assigned_at_base: all_facts.path_assigned_at_base.clone(),
+ path_moved_at_base: all_facts.path_moved_at_base.clone(),
+ path_accessed_at_base: all_facts.path_accessed_at_base.clone(),
+ };
+
+ let initialization::InitializationResult::<T>(
+ var_maybe_partly_initialized_on_exit,
+ move_errors,
+ ) = initialization::compute(initialization_ctx, &cfg_edge, &mut result);
+
+ // FIXME: move errors should prevent the computation from continuing: we can't compute
+ // liveness and analyze loans accurately when there are move errors, and should early
+ // return here.
+ for &(path, location) in move_errors.iter() {
+ result.move_errors.entry(location).or_default().push(path);
+ }
+
+ // 2) Liveness
+ let liveness_ctx = LivenessContext {
+ var_used_at: all_facts.var_used_at.clone(),
+ var_defined_at: all_facts.var_defined_at.clone(),
+ var_dropped_at: all_facts.var_dropped_at.clone(),
+ use_of_var_derefs_origin: all_facts.use_of_var_derefs_origin.clone(),
+ drop_of_var_derefs_origin: all_facts.drop_of_var_derefs_origin.clone(),
+ };
+
+ let mut origin_live_on_entry = liveness::compute_live_origins(
+ liveness_ctx,
+ &cfg_edge,
+ var_maybe_partly_initialized_on_exit,
+ &mut result,
+ );
+
+ let cfg_node = cfg_edge
+ .iter()
+ .map(|&(point1, _)| point1)
+ .chain(cfg_edge.iter().map(|&(_, point2)| point2))
+ .collect();
+
+ liveness::make_universal_regions_live::<T>(
+ &mut origin_live_on_entry,
+ &cfg_node,
+ &all_facts.universal_region,
+ );
+
+ // 3) Borrow checking
+
+ // Prepare data as datafrog relations, ready to join.
+ //
+ // Note: if rustc and polonius had more interaction, we could also delay or avoid
+ // generating some of the facts that are now always present here. For example,
+ // the `LocationInsensitive` variant doesn't use the `loan_killed_at` relation, so we could
+ // technically delay computing and passing it from rustc, when using this or the `Hybrid`
+ // variants, to after the pre-pass has made sure we actually need to compute the full
+ // analysis. If these facts happened to be recorded in separate MIR walks, we might also
+ // avoid generating those facts.
+
+ let origin_live_on_entry = origin_live_on_entry.into();
+
+ // TODO: also flip the order of this relation's arguments in rustc
+ // from `loan_invalidated_at(point, loan)` to `loan_invalidated_at(loan, point)`.
+ // to avoid this allocation.
+ let loan_invalidated_at = Relation::from_iter(
+ all_facts
+ .loan_invalidated_at
+ .iter()
+ .map(|&(point, loan)| (loan, point)),
+ );
+
+ let loan_killed_at = all_facts.loan_killed_at.clone().into();
+
+ // `known_placeholder_subset` is a list of all the `'a: 'b` subset relations the user gave:
+ // it's not required to be transitive. `known_contains` is its transitive closure: a list
+ // of all the known placeholder loans that each of these placeholder origins contains.
+ // Given the `known_placeholder_subset`s `'a: 'b` and `'b: 'c`: in the `known_contains`
+ // relation, `'a` will also contain `'c`'s placeholder loan.
+ let known_placeholder_subset = all_facts.known_placeholder_subset.clone().into();
+ let known_contains =
+ Output::<T>::compute_known_contains(&known_placeholder_subset, &all_facts.placeholder);
+
+ // Fully close over the `known_placeholder_subset` relation.
+ let known_placeholder_subset =
+ Output::<T>::compute_known_placeholder_subset(&known_placeholder_subset);
+
+ let placeholder_origin: Relation<_> = Relation::from_iter(
+ all_facts
+ .universal_region
+ .iter()
+ .map(|&origin| (origin, ())),
+ );
+
+ let placeholder_loan = Relation::from_iter(
+ all_facts
+ .placeholder
+ .iter()
+ .map(|&(origin, loan)| (loan, origin)),
+ );
+
+ // Ask the variants to compute errors in their own way
+ let mut ctx = Context {
+ origin_live_on_entry,
+ loan_invalidated_at,
+ cfg_edge,
+ subset_base: &all_facts.subset_base,
+ loan_issued_at: &all_facts.loan_issued_at,
+ loan_killed_at,
+ known_contains,
+ known_placeholder_subset,
+ placeholder_origin,
+ placeholder_loan,
+ potential_errors: None,
+ potential_subset_errors: None,
+ };
+
+ let (errors, subset_errors) = match algorithm {
+ Algorithm::LocationInsensitive => {
+ let (potential_errors, potential_subset_errors) =
+ location_insensitive::compute(&ctx, &mut result);
+
+                // Note: the error location is meaningless for a location-insensitive
+                // subset error analysis. This is acceptable here because this variant
+                // is not meant to be used directly except for debugging; the `Hybrid`
+                // variant will take advantage of its result.
+ let potential_subset_errors: Relation<(T::Origin, T::Origin, T::Point)> =
+ Relation::from_iter(
+ potential_subset_errors
+ .into_iter()
+ .map(|&(origin1, origin2)| (origin1, origin2, 0.into())),
+ );
+
+ (potential_errors, potential_subset_errors)
+ }
+ Algorithm::Naive => naive::compute(&ctx, &mut result),
+ Algorithm::DatafrogOpt => datafrog_opt::compute(&ctx, &mut result),
+ Algorithm::Hybrid => {
+ // Execute the fast `LocationInsensitive` computation as a pre-pass:
+ // if it finds no possible errors, we don't need to do the more complex
+ // computations as they won't find errors either, and we can return early.
+ let (potential_errors, potential_subset_errors) =
+ location_insensitive::compute(&ctx, &mut result);
+
+ if potential_errors.is_empty() && potential_subset_errors.is_empty() {
+                    // There are no loan errors and no subset errors, so we can return
+                    // early with empty error lists and avoid doing the heavy analysis.
+ (potential_errors, Vec::new().into())
+ } else {
+ // Record these potential errors as they can be used to limit the next
+ // variant's work to only these loans.
+ ctx.potential_errors =
+ Some(potential_errors.iter().map(|&(loan, _)| loan).collect());
+ ctx.potential_subset_errors = Some(potential_subset_errors);
+
+ datafrog_opt::compute(&ctx, &mut result)
+ }
+ }
+ Algorithm::Compare => {
+ // Ensure the `Naive` and `DatafrogOpt` errors are the same
+ let (naive_errors, naive_subset_errors) = naive::compute(&ctx, &mut result);
+ let (opt_errors, _) = datafrog_opt::compute(&ctx, &mut result);
+
+                // TODO: also compare illegal subset relation errors here?
+
+ let mut naive_errors_by_point = FxHashMap::default();
+ for &(loan, point) in naive_errors.iter() {
+ naive_errors_by_point
+ .entry(point)
+ .or_insert_with(Vec::new)
+ .push(loan);
+ }
+
+ let mut opt_errors_by_point = FxHashMap::default();
+ for &(loan, point) in opt_errors.iter() {
+ opt_errors_by_point
+ .entry(point)
+ .or_insert_with(Vec::new)
+ .push(loan);
+ }
+
+ if compare_errors(&naive_errors_by_point, &opt_errors_by_point) {
+ panic!(concat!(
+ "The errors reported by the naive algorithm differ from ",
+ "the errors reported by the optimized algorithm. ",
+ "See the error log for details."
+ ));
+ } else {
+ debug!("Naive and optimized algorithms reported the same errors.");
+ }
+
+ (naive_errors, naive_subset_errors)
+ }
+ };
+
+ // Record illegal access errors
+ for &(loan, location) in errors.iter() {
+ result.errors.entry(location).or_default().push(loan);
+ }
+
+ // Record illegal subset errors
+ for &(origin1, origin2, location) in subset_errors.iter() {
+ result
+ .subset_errors
+ .entry(location)
+ .or_default()
+ .insert((origin1, origin2));
+ }
+
+ // Record more debugging info when asked to do so
+ if dump_enabled {
+ for &(origin, location) in ctx.origin_live_on_entry.iter() {
+ result
+ .origin_live_on_entry
+ .entry(location)
+ .or_default()
+ .push(origin);
+ }
+
+ for &(origin, loan) in ctx.known_contains.iter() {
+ result
+ .known_contains
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+ }
+
+ result
+ }
+
+ /// Computes the transitive closure of the `known_placeholder_subset` relation, so that we have
+ /// the full list of placeholder loans contained by the placeholder origins.
+ fn compute_known_contains(
+ known_placeholder_subset: &Relation<(T::Origin, T::Origin)>,
+ placeholder: &[(T::Origin, T::Loan)],
+ ) -> Relation<(T::Origin, T::Loan)> {
+ let mut iteration = datafrog::Iteration::new();
+ let known_contains = iteration.variable("known_contains");
+
+ // known_contains(Origin1, Loan1) :-
+ // placeholder(Origin1, Loan1).
+ known_contains.extend(placeholder.iter());
+
+ while iteration.changed() {
+ // known_contains(Origin2, Loan1) :-
+ // known_contains(Origin1, Loan1),
+ // known_placeholder_subset(Origin1, Origin2).
+ known_contains.from_join(
+ &known_contains,
+ known_placeholder_subset,
+ |&_origin1, &loan1, &origin2| (origin2, loan1),
+ );
+ }
+
+ known_contains.complete()
+ }
+
+ /// Computes the transitive closure of the `known_placeholder_subset` relation.
+ fn compute_known_placeholder_subset(
+ known_placeholder_subset_base: &Relation<(T::Origin, T::Origin)>,
+ ) -> Relation<(T::Origin, T::Origin)> {
+ use datafrog::{Iteration, RelationLeaper};
+ let mut iteration = Iteration::new();
+
+ let known_placeholder_subset = iteration.variable("known_placeholder_subset");
+
+ // known_placeholder_subset(Origin1, Origin2) :-
+ // known_placeholder_subset_base(Origin1, Origin2).
+ known_placeholder_subset.extend(known_placeholder_subset_base.iter());
+
+ while iteration.changed() {
+ // known_placeholder_subset(Origin1, Origin3) :-
+ // known_placeholder_subset(Origin1, Origin2),
+ // known_placeholder_subset_base(Origin2, Origin3).
+ known_placeholder_subset.from_leapjoin(
+ &known_placeholder_subset,
+ known_placeholder_subset_base.extend_with(|&(_origin1, origin2)| origin2),
+ |&(origin1, _origin2), &origin3| (origin1, origin3),
+ );
+ }
+
+ known_placeholder_subset.complete()
+ }
+
+ fn new(dump_enabled: bool) -> Self {
+ Output {
+ errors: FxHashMap::default(),
+ subset_errors: FxHashMap::default(),
+ dump_enabled,
+ loan_live_at: FxHashMap::default(),
+ origin_contains_loan_at: FxHashMap::default(),
+ origin_contains_loan_anywhere: FxHashMap::default(),
+ origin_live_on_entry: FxHashMap::default(),
+ loan_invalidated_at: FxHashMap::default(),
+ move_errors: FxHashMap::default(),
+ subset: FxHashMap::default(),
+ subset_anywhere: FxHashMap::default(),
+ var_live_on_entry: FxHashMap::default(),
+ var_drop_live_on_entry: FxHashMap::default(),
+ path_maybe_initialized_on_exit: FxHashMap::default(),
+ path_maybe_uninitialized_on_exit: FxHashMap::default(),
+ var_maybe_partly_initialized_on_exit: FxHashMap::default(),
+ known_contains: FxHashMap::default(),
+ }
+ }
+
+ pub fn errors_at(&self, location: T::Point) -> &[T::Loan] {
+ match self.errors.get(&location) {
+ Some(v) => v,
+ None => &[],
+ }
+ }
+
+ pub fn loans_in_scope_at(&self, location: T::Point) -> &[T::Loan] {
+ match self.loan_live_at.get(&location) {
+ Some(p) => p,
+ None => &[],
+ }
+ }
+
+ pub fn origin_contains_loan_at(
+ &self,
+ location: T::Point,
+ ) -> Cow<'_, BTreeMap<T::Origin, BTreeSet<T::Loan>>> {
+ assert!(self.dump_enabled);
+ match self.origin_contains_loan_at.get(&location) {
+ Some(map) => Cow::Borrowed(map),
+ None => Cow::Owned(BTreeMap::default()),
+ }
+ }
+
+ pub fn origins_live_at(&self, location: T::Point) -> &[T::Origin] {
+ assert!(self.dump_enabled);
+ match self.origin_live_on_entry.get(&location) {
+ Some(v) => v,
+ None => &[],
+ }
+ }
+
+ pub fn subsets_at(
+ &self,
+ location: T::Point,
+ ) -> Cow<'_, BTreeMap<T::Origin, BTreeSet<T::Origin>>> {
+ assert!(self.dump_enabled);
+ match self.subset.get(&location) {
+ Some(v) => Cow::Borrowed(v),
+ None => Cow::Owned(BTreeMap::default()),
+ }
+ }
+}
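+
+// A hedged usage sketch of the API above (building the `AllFacts` input is
+// elided and left to the caller): once the facts are assembled, run
+// `let output = Output::compute(&all_facts, Algorithm::Hybrid, false);`
+// and then inspect `output.errors`, or query a single point through
+// `output.errors_at(point)`.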
+
+/// Compares the errors reported by the Naive implementation with the errors
+/// reported by the optimized implementation.
+fn compare_errors<Loan: Atom, Point: Atom>(
+ all_naive_errors: &FxHashMap<Point, Vec<Loan>>,
+ all_opt_errors: &FxHashMap<Point, Vec<Loan>>,
+) -> bool {
+ let points = all_naive_errors.keys().chain(all_opt_errors.keys());
+
+ let mut differ = false;
+ for point in points {
+ let mut naive_errors = all_naive_errors.get(&point).cloned().unwrap_or_default();
+ naive_errors.sort();
+
+ let mut opt_errors = all_opt_errors.get(&point).cloned().unwrap_or_default();
+ opt_errors.sort();
+
+ for err in naive_errors.iter() {
+ if !opt_errors.contains(err) {
+ error!(
+ "Error {0:?} at {1:?} reported by naive, but not opt.",
+ err, point
+ );
+ differ = true;
+ }
+ }
+
+ for err in opt_errors.iter() {
+ if !naive_errors.contains(err) {
+ error!(
+ "Error {0:?} at {1:?} reported by opt, but not naive.",
+ err, point
+ );
+ differ = true;
+ }
+ }
+ }
+
+ differ
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ impl Atom for usize {
+ fn index(self) -> usize {
+ self
+ }
+ }
+
+ fn compare(
+ errors1: &FxHashMap<usize, Vec<usize>>,
+ errors2: &FxHashMap<usize, Vec<usize>>,
+ ) -> bool {
+ let diff1 = compare_errors(errors1, errors2);
+ let diff2 = compare_errors(errors2, errors1);
+ assert_eq!(diff1, diff2);
+ diff1
+ }
+
+ #[test]
+ fn test_compare_errors() {
+ let empty = FxHashMap::default();
+ assert_eq!(false, compare(&empty, &empty));
+ let mut empty_vec = FxHashMap::default();
+ empty_vec.insert(1, vec![]);
+ empty_vec.insert(2, vec![]);
+ assert_eq!(false, compare(&empty, &empty_vec));
+
+ let mut singleton1 = FxHashMap::default();
+ singleton1.insert(1, vec![10]);
+ assert_eq!(false, compare(&singleton1, &singleton1));
+ let mut singleton2 = FxHashMap::default();
+ singleton2.insert(1, vec![11]);
+ assert_eq!(false, compare(&singleton2, &singleton2));
+ let mut singleton3 = FxHashMap::default();
+ singleton3.insert(2, vec![10]);
+ assert_eq!(false, compare(&singleton3, &singleton3));
+
+ assert_eq!(true, compare(&singleton1, &singleton2));
+ assert_eq!(true, compare(&singleton2, &singleton3));
+ assert_eq!(true, compare(&singleton1, &singleton3));
+
+ assert_eq!(true, compare(&empty, &singleton1));
+ assert_eq!(true, compare(&empty, &singleton2));
+ assert_eq!(true, compare(&empty, &singleton3));
+
+ let mut errors1 = FxHashMap::default();
+ errors1.insert(1, vec![11]);
+ errors1.insert(2, vec![10]);
+ assert_eq!(false, compare(&errors1, &errors1));
+ assert_eq!(true, compare(&errors1, &singleton1));
+ assert_eq!(true, compare(&errors1, &singleton2));
+ assert_eq!(true, compare(&errors1, &singleton3));
+ }
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/naive.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/naive.rs
new file mode 100644
index 0000000..aa42048
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/polonius-engine/src/output/naive.rs
@@ -0,0 +1,299 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A version of the Naive datalog analysis using Datafrog.
+
+use datafrog::{Iteration, Relation, RelationLeaper};
+use std::time::Instant;
+
+use crate::facts::FactTypes;
+use crate::output::{Context, Output};
+
+pub(super) fn compute<T: FactTypes>(
+ ctx: &Context<'_, T>,
+ result: &mut Output<T>,
+) -> (
+ Relation<(T::Loan, T::Point)>,
+ Relation<(T::Origin, T::Origin, T::Point)>,
+) {
+ let timer = Instant::now();
+
+ let (errors, subset_errors) = {
+ // Static inputs
+ let origin_live_on_entry_rel = &ctx.origin_live_on_entry;
+ let cfg_edge = &ctx.cfg_edge;
+ let loan_killed_at = &ctx.loan_killed_at;
+ let known_placeholder_subset = &ctx.known_placeholder_subset;
+ let placeholder_origin = &ctx.placeholder_origin;
+
+ // Create a new iteration context, ...
+ let mut iteration = Iteration::new();
+
+ // .. some variables, ..
+ let subset = iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset");
+ let origin_contains_loan_on_entry =
+ iteration.variable::<(T::Origin, T::Loan, T::Point)>("origin_contains_loan_on_entry");
+ let loan_live_at = iteration.variable::<((T::Loan, T::Point), ())>("loan_live_at");
+
+ // `loan_invalidated_at` facts, stored ready for joins
+ let loan_invalidated_at = Relation::from_iter(
+ ctx.loan_invalidated_at
+ .iter()
+ .map(|&(loan, point)| ((loan, point), ())),
+ );
+
+ // different indices for `subset`.
+ let subset_o1p = iteration.variable_indistinct("subset_o1p");
+ let subset_o2p = iteration.variable_indistinct("subset_o2p");
+
+ // different index for `origin_contains_loan_on_entry`.
+ let origin_contains_loan_on_entry_op =
+ iteration.variable_indistinct("origin_contains_loan_on_entry_op");
+
+ // Unfortunately, we need `origin_live_on_entry` in both variable and relation forms:
+ // We need:
+ // - `origin_live_on_entry` as a Relation for the leapjoins in rules 3 & 6
+ // - `origin_live_on_entry` as a Variable for the join in rule 7
+ //
+ // The leapjoins use `origin_live_on_entry` as `(Origin, Point)` tuples, while the join uses
+ // it as a `((O, P), ())` tuple to filter the `((Origin, Point), Loan)` tuples from
+ // `origin_contains_loan_on_entry_op`.
+ //
+ // The regular join in rule 7 could be turned into a `filter_with` leaper but that would
+ // result in a leapjoin with no `extend_*` leapers: a leapjoin that is not well-formed.
+ // Doing the filtering via an `extend_with` leaper would be extremely inefficient.
+ //
+ // Until there's an API in datafrog to handle this use-case better, we do a slightly less
+ // inefficient thing of copying the whole static input into a Variable to use a regular
+ // join, even though the liveness information can be quite heavy (around 1M tuples
+ // on `clap`).
+ // This is the Naive variant so this is not a big problem, but needs an
+ // explanation.
+ let origin_live_on_entry_var =
+ iteration.variable::<((T::Origin, T::Point), ())>("origin_live_on_entry");
+ origin_live_on_entry_var.extend(
+ origin_live_on_entry_rel
+ .iter()
+ .map(|&(origin, point)| ((origin, point), ())),
+ );
+
+        // output relations: illegal access errors, and illegal subset relation errors
+ let errors = iteration.variable("errors");
+ let subset_errors = iteration.variable::<(T::Origin, T::Origin, T::Point)>("subset_errors");
+
+ // load initial facts:
+
+ // Rule 1: the initial subsets are the non-transitive `subset_base` static input.
+ //
+ // subset(Origin1, Origin2, Point) :-
+ // subset_base(Origin1, Origin2, Point).
+ subset.extend(ctx.subset_base.iter());
+
+ // Rule 4: the issuing origins are the ones initially containing loans.
+ //
+ // origin_contains_loan_on_entry(Origin, Loan, Point) :-
+ // loan_issued_at(Origin, Loan, Point).
+ origin_contains_loan_on_entry.extend(ctx.loan_issued_at.iter());
+
+ // .. and then start iterating rules!
+ while iteration.changed() {
+ // Cleanup step: remove symmetries
+ // - remove origins which are `subset`s of themselves
+ //
+            // FIXME: investigate whether there is a better way to do this without complicating
+ // the rules too much, because it would also require temporary variables and
+ // impact performance. Until then, the big reduction in tuples improves performance
+ // a lot, even if we're potentially adding a small number of tuples
+ // per round just to remove them in the next round.
+ subset
+ .recent
+ .borrow_mut()
+ .elements
+ .retain(|&(origin1, origin2, _)| origin1 != origin2);
+
+ // Remap fields to re-index by keys, to prepare the data needed by the rules below.
+ subset_o1p.from_map(&subset, |&(origin1, origin2, point)| {
+ ((origin1, point), origin2)
+ });
+ subset_o2p.from_map(&subset, |&(origin1, origin2, point)| {
+ ((origin2, point), origin1)
+ });
+
+ origin_contains_loan_on_entry_op
+ .from_map(&origin_contains_loan_on_entry, |&(origin, loan, point)| {
+ ((origin, point), loan)
+ });
+
+ // Rule 1: done above, as part of the static input facts setup.
+
+ // Rule 2: compute the subset transitive closure, at a given point.
+ //
+ // subset(Origin1, Origin3, Point) :-
+ // subset(Origin1, Origin2, Point),
+ // subset(Origin2, Origin3, Point).
+ subset.from_join(
+ &subset_o2p,
+ &subset_o1p,
+ |&(_origin2, point), &origin1, &origin3| (origin1, origin3, point),
+ );
+
+ // Rule 3: propagate subsets along the CFG, according to liveness.
+ //
+ // subset(Origin1, Origin2, Point2) :-
+ // subset(Origin1, Origin2, Point1),
+ // cfg_edge(Point1, Point2),
+ // origin_live_on_entry(Origin1, Point2),
+ // origin_live_on_entry(Origin2, Point2).
+ subset.from_leapjoin(
+ &subset,
+ (
+ cfg_edge.extend_with(|&(_origin1, _origin2, point1)| point1),
+ origin_live_on_entry_rel.extend_with(|&(origin1, _origin2, _point1)| origin1),
+ origin_live_on_entry_rel.extend_with(|&(_origin1, origin2, _point1)| origin2),
+ ),
+ |&(origin1, origin2, _point1), &point2| (origin1, origin2, point2),
+ );
+
+ // Rule 4: done above as part of the static input facts setup.
+
+ // Rule 5: propagate loans within origins, at a given point, according to subsets.
+ //
+ // origin_contains_loan_on_entry(Origin2, Loan, Point) :-
+ // origin_contains_loan_on_entry(Origin1, Loan, Point),
+ // subset(Origin1, Origin2, Point).
+ origin_contains_loan_on_entry.from_join(
+ &origin_contains_loan_on_entry_op,
+ &subset_o1p,
+ |&(_origin1, point), &loan, &origin2| (origin2, loan, point),
+ );
+
+ // Rule 6: propagate loans along the CFG, according to liveness.
+ //
+ // origin_contains_loan_on_entry(Origin, Loan, Point2) :-
+ // origin_contains_loan_on_entry(Origin, Loan, Point1),
+ // !loan_killed_at(Loan, Point1),
+ // cfg_edge(Point1, Point2),
+ // origin_live_on_entry(Origin, Point2).
+ origin_contains_loan_on_entry.from_leapjoin(
+ &origin_contains_loan_on_entry,
+ (
+ loan_killed_at.filter_anti(|&(_origin, loan, point1)| (loan, point1)),
+ cfg_edge.extend_with(|&(_origin, _loan, point1)| point1),
+ origin_live_on_entry_rel.extend_with(|&(origin, _loan, _point1)| origin),
+ ),
+ |&(origin, loan, _point1), &point2| (origin, loan, point2),
+ );
+
+ // Rule 7: compute whether a loan is live at a given point, i.e. whether it is
+ // contained in a live origin at this point.
+ //
+ // loan_live_at(Loan, Point) :-
+ // origin_contains_loan_on_entry(Origin, Loan, Point),
+ // origin_live_on_entry(Origin, Point).
+ loan_live_at.from_join(
+ &origin_contains_loan_on_entry_op,
+ &origin_live_on_entry_var,
+ |&(_origin, point), &loan, _| ((loan, point), ()),
+ );
+
+ // Rule 8: compute illegal access errors, i.e. an invalidation of a live loan.
+ //
+ // Here again, this join acts as a pure filter and could be a more efficient leapjoin.
+ // However, similarly to the `origin_live_on_entry` example described above, the
+ // leapjoin with a single `filter_with` leaper would currently not be well-formed.
+            // We don't explicitly need to materialize `loan_live_at` either, and that doesn't
+ // change the well-formedness situation, so we still materialize it (since that also
+ // helps in testing).
+ //
+ // errors(Loan, Point) :-
+ // loan_invalidated_at(Loan, Point),
+ // loan_live_at(Loan, Point).
+ errors.from_join(
+ &loan_live_at,
+ &loan_invalidated_at,
+ |&(loan, point), _, _| (loan, point),
+ );
+
+ // Rule 9: compute illegal subset relations errors, i.e. the undeclared subsets
+ // between two placeholder origins.
+            // Here as well, WF-ness prevents this join from being a filter-only leapjoin. It
+            // doesn't matter much, as `placeholder_origin` is a single-value relation.
+ //
+ // subset_error(Origin1, Origin2, Point) :-
+ // subset(Origin1, Origin2, Point),
+ // placeholder_origin(Origin1),
+ // placeholder_origin(Origin2),
+ // !known_placeholder_subset(Origin1, Origin2).
+ subset_errors.from_leapjoin(
+ &subset,
+ (
+ placeholder_origin.extend_with(|&(origin1, _origin2, _point)| origin1),
+ placeholder_origin.extend_with(|&(_origin1, origin2, _point)| origin2),
+ known_placeholder_subset
+ .filter_anti(|&(origin1, origin2, _point)| (origin1, origin2)),
+ // remove symmetries:
+ datafrog::ValueFilter::from(|&(origin1, origin2, _point), _| {
+ origin1 != origin2
+ }),
+ ),
+ |&(origin1, origin2, point), _| (origin1, origin2, point),
+ );
+ }
+
+ // Handle verbose output data
+ if result.dump_enabled {
+ let subset = subset.complete();
+ assert!(
+ subset
+ .iter()
+ .filter(|&(origin1, origin2, _)| origin1 == origin2)
+ .count()
+ == 0,
+ "unwanted subset symmetries"
+ );
+ for &(origin1, origin2, location) in subset.iter() {
+ result
+ .subset
+ .entry(location)
+ .or_default()
+ .entry(origin1)
+ .or_default()
+ .insert(origin2);
+ }
+
+ let origin_contains_loan_on_entry = origin_contains_loan_on_entry.complete();
+ for &(origin, loan, location) in origin_contains_loan_on_entry.iter() {
+ result
+ .origin_contains_loan_at
+ .entry(location)
+ .or_default()
+ .entry(origin)
+ .or_default()
+ .insert(loan);
+ }
+
+ let loan_live_at = loan_live_at.complete();
+ for &((loan, location), _) in loan_live_at.iter() {
+ result.loan_live_at.entry(location).or_default().push(loan);
+ }
+ }
+
+ (errors.complete(), subset_errors.complete())
+ };
+
+ info!(
+ "analysis done: {} `errors` tuples, {} `subset_errors` tuples, {:?}",
+ errors.len(),
+ subset_errors.len(),
+ timer.elapsed()
+ );
+
+ (errors, subset_errors)
+}
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/.cargo-checksum.json b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/.cargo-checksum.json
new file mode 100644
index 0000000..544af9f
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"CODE_OF_CONDUCT.md":"edca092fde496419a9f1ba640048aa0270b62dfea576cd3175f0b53e3c230470","Cargo.toml":"647814b27b6fc4fbef1df70d796b53b723e776b68467372044e4182763007379","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"cac8197ac869d64a6efc26cab883a269392ae6db51f7453bca722f8f31d67c7c","src/lib.rs":"ddecafb5db609d0d8eebd19e4d98dc865e7e9282a4183421f9bd765c01a231c0"},"package":"08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"} \ No newline at end of file
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/CODE_OF_CONDUCT.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..d70b2b5
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/CODE_OF_CONDUCT.md
@@ -0,0 +1,40 @@
+# The Rust Code of Conduct
+
+A version of this document [can be found online](https://www.rust-lang.org/conduct.html).
+
+## Conduct
+
+**Contact**: [rust-mods@rust-lang.org](mailto:rust-mods@rust-lang.org)
+
+* We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
+* On IRC, please avoid using overtly sexual nicknames or other nicknames that might detract from a friendly, safe and welcoming environment for all.
+* Please be kind and courteous. There's no need to be mean or rude.
+* Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
+* Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
+* We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term "harassment" as including the definition in the <a href="http://citizencodeofconduct.org/">Citizen Code of Conduct</a>; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups.
+* Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the [Rust moderation team][mod_team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back.
+* Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
+
+## Moderation
+
+
+These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact the [Rust moderation team][mod_team].
+
+1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
+2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
+3. Moderators will first respond to such remarks with a warning.
+4. If the warning is unheeded, the user will be "kicked," i.e., kicked out of the communication channel to cool off.
+5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
+6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
+7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, **in private**. Complaints about bans in-channel are not allowed.
+8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
+
+In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
+
+And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
+
+The enforcement policies listed above apply to all official Rust venues; including official IRC channels (#rust, #rust-internals, #rust-tools, #rust-libs, #rustc, #rust-beginners, #rust-docs, #rust-community, #rust-lang, and #cargo); GitHub repositories under rust-lang, rust-lang-nursery, and rust-lang-deprecated; and all forums under rust-lang.org (users.rust-lang.org, internals.rust-lang.org). For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
+
+*Adapted from the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).*
+
+[mod_team]: https://www.rust-lang.org/team.html#Moderation-team
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/Cargo.toml b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/Cargo.toml
new file mode 100644
index 0000000..47330b7
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/Cargo.toml
@@ -0,0 +1,25 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "rustc-hash"
+version = "1.1.0"
+authors = ["The Rust Project Developers"]
+description = "speed, non-cryptographic hash used in rustc"
+readme = "README.md"
+keywords = ["hash", "fxhash", "rustc"]
+license = "Apache-2.0/MIT"
+repository = "https://github.com/rust-lang-nursery/rustc-hash"
+
+[features]
+default = ["std"]
+std = []
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-APACHE b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-APACHE
new file mode 100644
index 0000000..16fe87b
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-MIT b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-MIT
new file mode 100644
index 0000000..31aa793
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/README.md b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/README.md
new file mode 100644
index 0000000..e33057a
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/README.md
@@ -0,0 +1,38 @@
+# rustc-hash
+
+[![crates.io](https://img.shields.io/crates/v/rustc-hash.svg)](https://crates.io/crates/rustc-hash)
+[![Documentation](https://docs.rs/rustc-hash/badge.svg)](https://docs.rs/rustc-hash)
+
+A speedy hash algorithm used within rustc. The hashmap in liballoc by
+default uses SipHash which isn't quite as speedy as we want. In the
+compiler we're not really worried about DOS attempts, so we use a fast
+non-cryptographic hash.
+
+This is the same as the algorithm used by Firefox -- which is a
+homespun one not based on any widely-known algorithm -- though
+modified to produce 64-bit hash values instead of 32-bit hash
+values. It consistently out-performs an FNV-based hash within rustc
+itself -- the collision rate is similar or slightly worse than FNV,
+but the speed of the hash function itself is much higher because it
+works on up to 8 bytes at a time.
+
+## Usage
+
+```rust
+use rustc_hash::FxHashMap;
+
+let mut map: FxHashMap<u32, u32> = FxHashMap::default();
+map.insert(22, 44);
+```
+
+### `no_std`
+
+This crate can be used as a `no_std` crate by disabling the `std`
+feature, which is on by default, as follows:
+
+```toml
+rustc-hash = { version = "1.0", default-features = false }
+```
+
+In this configuration, `FxHasher` is the only export, and the
+`FxHashMap`/`FxHashSet` type aliases are omitted.
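+
+As a sketch of what direct use of `FxHasher` can look like in that
+configuration, hashing bytes through the standard `Hasher` trait:
+
+```rust
+use core::hash::Hasher;
+use rustc_hash::FxHasher;
+
+let mut hasher = FxHasher::default();
+hasher.write(b"hello world");
+let hash: u64 = hasher.finish();
+```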
diff --git a/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/src/lib.rs b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/src/lib.rs
new file mode 100644
index 0000000..ee9ad31
--- /dev/null
+++ b/gcc/rust/checks/errors/borrowck/ffi-polonius/vendor/rustc-hash/src/lib.rs
@@ -0,0 +1,148 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Fast, non-cryptographic hash used by rustc and Firefox.
+//!
+//! # Example
+//!
+//! ```rust
+//! # #[cfg(feature = "std")]
+//! # fn main() {
+//! use rustc_hash::FxHashMap;
+//! let mut map: FxHashMap<u32, u32> = FxHashMap::default();
+//! map.insert(22, 44);
+//! # }
+//! # #[cfg(not(feature = "std"))]
+//! # fn main() { }
+//! ```
+
+#![no_std]
+
+#[cfg(feature = "std")]
+extern crate std;
+
+use core::convert::TryInto;
+use core::default::Default;
+#[cfg(feature = "std")]
+use core::hash::BuildHasherDefault;
+use core::hash::Hasher;
+use core::mem::size_of;
+use core::ops::BitXor;
+#[cfg(feature = "std")]
+use std::collections::{HashMap, HashSet};
+
+/// Type alias for a hashmap using the `fx` hash algorithm.
+#[cfg(feature = "std")]
+pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
+
+/// Type alias for a hashmap using the `fx` hash algorithm.
+#[cfg(feature = "std")]
+pub type FxHashSet<V> = HashSet<V, BuildHasherDefault<FxHasher>>;
+
+/// A speedy hash algorithm for use within rustc. The hashmap in liballoc
+/// by default uses SipHash which isn't quite as speedy as we want. In the
+/// compiler we're not really worried about DOS attempts, so we use a fast
+/// non-cryptographic hash.
+///
+/// This is the same as the algorithm used by Firefox -- which is a homespun
+/// one not based on any widely-known algorithm -- though modified to produce
+/// 64-bit hash values instead of 32-bit hash values. It consistently
+/// out-performs an FNV-based hash within rustc itself -- the collision rate is
+/// similar or slightly worse than FNV, but the speed of the hash function
+/// itself is much higher because it works on up to 8 bytes at a time.
+pub struct FxHasher {
+ hash: usize,
+}
+
+#[cfg(target_pointer_width = "32")]
+const K: usize = 0x9e3779b9;
+#[cfg(target_pointer_width = "64")]
+const K: usize = 0x517cc1b727220a95;
+
+impl Default for FxHasher {
+ #[inline]
+ fn default() -> FxHasher {
+ FxHasher { hash: 0 }
+ }
+}
+
+impl FxHasher {
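+    // One mixing round: rotate the running hash left by 5 bits, xor in the
+    // new word, then do a wrapping multiply by the large odd constant `K`
+    // to spread the bits across the full word.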
+ #[inline]
+ fn add_to_hash(&mut self, i: usize) {
+ self.hash = self.hash.rotate_left(5).bitxor(i).wrapping_mul(K);
+ }
+}
+
+impl Hasher for FxHasher {
+ #[inline]
+ fn write(&mut self, mut bytes: &[u8]) {
+ #[cfg(target_pointer_width = "32")]
+ let read_usize = |bytes: &[u8]| u32::from_ne_bytes(bytes[..4].try_into().unwrap());
+ #[cfg(target_pointer_width = "64")]
+ let read_usize = |bytes: &[u8]| u64::from_ne_bytes(bytes[..8].try_into().unwrap());
+
+ let mut hash = FxHasher { hash: self.hash };
+ assert!(size_of::<usize>() <= 8);
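+        // Consume the input in the largest chunks first: whole `usize` words,
+        // then any remaining 4-, 2-, and 1-byte tail, so the bulk of the input
+        // is mixed a word at a time.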
+ while bytes.len() >= size_of::<usize>() {
+ hash.add_to_hash(read_usize(bytes) as usize);
+ bytes = &bytes[size_of::<usize>()..];
+ }
+ if (size_of::<usize>() > 4) && (bytes.len() >= 4) {
+ hash.add_to_hash(u32::from_ne_bytes(bytes[..4].try_into().unwrap()) as usize);
+ bytes = &bytes[4..];
+ }
+ if (size_of::<usize>() > 2) && bytes.len() >= 2 {
+ hash.add_to_hash(u16::from_ne_bytes(bytes[..2].try_into().unwrap()) as usize);
+ bytes = &bytes[2..];
+ }
+ if (size_of::<usize>() > 1) && bytes.len() >= 1 {
+ hash.add_to_hash(bytes[0] as usize);
+ }
+ self.hash = hash.hash;
+ }
+
+ #[inline]
+ fn write_u8(&mut self, i: u8) {
+ self.add_to_hash(i as usize);
+ }
+
+ #[inline]
+ fn write_u16(&mut self, i: u16) {
+ self.add_to_hash(i as usize);
+ }
+
+ #[inline]
+ fn write_u32(&mut self, i: u32) {
+ self.add_to_hash(i as usize);
+ }
+
+ #[cfg(target_pointer_width = "32")]
+ #[inline]
+ fn write_u64(&mut self, i: u64) {
+ self.add_to_hash(i as usize);
+ self.add_to_hash((i >> 32) as usize);
+ }
+
+ #[cfg(target_pointer_width = "64")]
+ #[inline]
+ fn write_u64(&mut self, i: u64) {
+ self.add_to_hash(i as usize);
+ }
+
+ #[inline]
+ fn write_usize(&mut self, i: usize) {
+ self.add_to_hash(i);
+ }
+
+ #[inline]
+ fn finish(&self) -> u64 {
+ self.hash as u64
+ }
+}
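The mixing step described in the FxHasher doc comment above (rotate the running hash left by five bits, xor in the next word, then do a wrapping multiply by the constant K) is compact enough to restate outside Rust. The following is only an illustrative C++ sketch of that step for the 64-bit value of K shown above; it is not part of the vendored crate or of this patch:

#include <cstdint>

// Hypothetical C++ rendering of FxHasher::add_to_hash for 64-bit targets.
constexpr std::uint64_t K = 0x517cc1b727220a95ULL;

inline std::uint64_t fx_mix(std::uint64_t hash, std::uint64_t word)
{
  hash = (hash << 5) | (hash >> 59); // rotate_left(5)
  hash ^= word;                      // fold in the next input word
  return hash * K;                   // unsigned overflow wraps, like wrapping_mul
}

Hasher::write above simply applies this step to the input in usize-sized chunks, then to any 4-, 2-, and 1-byte tail.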
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 81bdfaa..c6ccd46 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,108 @@
+2025-03-18 Marek Polacek <polacek@redhat.com>
+
+ PR c++/119344
+ * g++.dg/conversion/ptrmem10.C: New test.
+
+2025-03-18 Jason Merrill <jason@redhat.com>
+
+ PR c++/119194
+ * g++.dg/template/linkage7.C: New test.
+
+2025-03-18 Marek Polacek <polacek@redhat.com>
+
+ PR c++/118104
+ * g++.dg/cpp0x/alias-decl-variadic3.C: New test.
+
+2025-03-18 Harald Anlauf <anlauf@gmx.de>
+
+ PR fortran/119338
+ * gfortran.dg/deferred_character_18.f90: Adjust testcase.
+ * gfortran.dg/allocate_assumed_charlen_5.f90: New test.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/119311
+ * c-c++-common/musttail14.c: Use * instead of \* in the regexps.
+ * c-c++-common/musttail25.c: New test.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR c/116545
+ * c-c++-common/attr-fallthrough-2.c: Adjust expected diagnostics
+ for C++.
+ * c-c++-common/musttail15.c: New test.
+ * c-c++-common/musttail16.c: New test.
+ * c-c++-common/musttail17.c: New test.
+ * c-c++-common/musttail18.c: New test.
+ * c-c++-common/musttail19.c: New test.
+ * c-c++-common/musttail20.c: New test.
+ * c-c++-common/musttail21.c: New test.
+ * c-c++-common/musttail22.c: New test.
+ * c-c++-common/musttail23.c: New test.
+ * c-c++-common/musttail24.c: New test.
+ * g++.dg/musttail7.C: New test.
+ * g++.dg/musttail8.C: New test.
+ * g++.dg/musttail12.C: New test.
+ * g++.dg/musttail13.C: New test.
+ * g++.dg/musttail14.C: New test.
+ * g++.dg/ext/pr116545.C: New test.
+
+2025-03-18 Bob Dubner <rdubner@symas.com>
+
+ * cobol.dg/group1/check_88.cob: New testcase.
+ * cobol.dg/group1/comp5.cob: Likewise.
+ * cobol.dg/group1/declarative_1.cob: Likewise.
+ * cobol.dg/group1/display.cob: Likewise.
+ * cobol.dg/group1/display2.cob: Likewise.
+ * cobol.dg/group1/line-sequential.cob: Likewise.
+ * cobol.dg/group1/multiple-compares.cob: Likewise.
+ * cobol.dg/group1/multiply2.cob: Likewise.
+ * cobol.dg/group1/packed.cob: Likewise.
+ * cobol.dg/group1/perform-nested-exit.cob: Likewise.
+ * cobol.dg/group1/pointer1.cob: Likewise.
+ * cobol.dg/group1/simple-arithmetic.cob: Likewise.
+ * cobol.dg/group1/simple-classes.cob: Likewise.
+ * cobol.dg/group1/simple-if.cob: Likewise.
+ * cobol.dg/group1/simple-perform.cob: Likewise.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ * lib/gcc-dg.exp (${tool}-load): If output-file is set, compare
+ combined output against content of the [lindex ${output-file} 1]
+ file.
+	(dg-output-file): New directive; a usage sketch follows this ChangeLog hunk.
+ * lib/dg-test-cleanup.exp (cleanup-after-saved-dg-test): Clear
+ output-file variable.
+ * gcc.dg/dg-output-file-1.c: New test.
+ * gcc.dg/dg-output-file-1-lp64.txt: New test.
+ * gcc.dg/dg-output-file-1-ilp32.txt: New test.
+
+2025-03-18 Patrick Palka <ppalka@redhat.com>
+
+ PR c++/119233
+ * g++.dg/template/fn-ptr5.C: New test.
+
+2025-03-18 Jakub Jelinek <jakub@redhat.com>
+
+ PR rtl-optimization/119307
+ * gcc.target/i386/pr119307.c: New test.
+
+2025-03-18 Richard Biener <rguenther@suse.de>
+
+ PR debug/101533
+ * g++.dg/debug/pr101533.C: New testcase.
+
+2025-03-18 Haochen Jiang <haochen.jiang@intel.com>
+
+ * gcc.target/i386/avx512f-pr103750-1.c: Remove XFAIL.
+ * gcc.target/i386/avx512f-pr103750-2.c: Ditto.
+ * gcc.target/i386/avx512fp16-pr103750-1.c: Ditto.
+ * gcc.target/i386/avx512fp16-pr103750-2.c: Ditto.
+
+2025-03-18 Jeff Law <jlaw@ventanamicro.com>
+
+ * gcc.target/riscv/redundant-andi-2.c: New test.
+
2025-03-17 Jeff Law <jlaw@ventanamicro.com>
* gcc.target/riscv/redundant-andi.c: New test.
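As referenced in the 2025-03-18 dg-output-file entry above, the new directive makes ${tool}-load compare a test's combined output against the contents of a file shipped next to the test. A minimal usage sketch follows; it assumes the directive takes the expected-output file name plus an optional target selector (as suggested by the dg-output-file-1-lp64.txt / -ilp32.txt files listed above), and the file names here are purely illustrative:

// { dg-do run }
// { dg-output-file "expected-lp64.txt" { target lp64 } }
// { dg-output-file "expected-ilp32.txt" { target ilp32 } }

#include <cstdio>

int main()
{
  // Whatever the program prints is compared, as a whole, against the
  // selected expected-output file instead of individual dg-output regexps.
  std::printf("sizeof (long) = %d\n", (int) sizeof(long));
  return 0;
}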
diff --git a/gcc/testsuite/g++.dg/conversion/ptrmem10.C b/gcc/testsuite/g++.dg/conversion/ptrmem10.C
new file mode 100644
index 0000000..b5fc050
--- /dev/null
+++ b/gcc/testsuite/g++.dg/conversion/ptrmem10.C
@@ -0,0 +1,14 @@
+// PR c++/119344
+// { dg-do compile { target c++11 } }
+
+struct S {
+ void fn();
+};
+typedef void (S::*T)(void);
+template <T Ptr>
+struct s
+{
+ static const bool t = Ptr != T();
+};
+
+int t1 = s<&S::fn>::t;
diff --git a/gcc/testsuite/g++.dg/cpp0x/alias-decl-variadic3.C b/gcc/testsuite/g++.dg/cpp0x/alias-decl-variadic3.C
new file mode 100644
index 0000000..077f033
--- /dev/null
+++ b/gcc/testsuite/g++.dg/cpp0x/alias-decl-variadic3.C
@@ -0,0 +1,22 @@
+// PR c++/118104
+// { dg-do compile { target c++11 } }
+
+template<typename... Zs> struct Z { };
+
+template <class... Ts> struct X {
+ template <class... Us> using W = Z<void(Ts, Us)...>;
+ template <class... Us> using Y = X<void(Ts, Us)...>;
+};
+
+template <class A, class... P>
+using foo = X<int, int>::W<A, P...>;
+
+template <class A, class... P>
+using bar = X<int, int>::Y<A, P...>;
+
+void
+g ()
+{
+ foo<int, int> f;
+ bar<int, int> b;
+}
diff --git a/gcc/testsuite/g++.dg/template/linkage7.C b/gcc/testsuite/g++.dg/template/linkage7.C
new file mode 100644
index 0000000..6686a0e
--- /dev/null
+++ b/gcc/testsuite/g++.dg/template/linkage7.C
@@ -0,0 +1,17 @@
+// PR c++/119194
+// { dg-do compile { target c++11 } }
+
+template <const int& Str>
+[[gnu::noipa]]
+int get_length() {
+ return Str;
+}
+static constexpr int sssss{ 3};
+int main() {
+ if (get_length<sssss>() != sssss)
+ __builtin_abort();
+ return 0;
+}
+
+// { dg-final { scan-assembler {_Z10get_lengthIL_ZL5sssssEEiv} } }
+// { dg-final { scan-assembler-not {(weak|glob)[^\n]*_Z10get_lengthIL_Z5sssssEEiv} } }
diff --git a/gcc/testsuite/gcc.target/i386/apx-ndd-tls-1b.c b/gcc/testsuite/gcc.target/i386/apx-ndd-tls-1b.c
index d063703..afcad0c 100644
--- a/gcc/testsuite/gcc.target/i386/apx-ndd-tls-1b.c
+++ b/gcc/testsuite/gcc.target/i386/apx-ndd-tls-1b.c
@@ -3,7 +3,10 @@
/* { dg-require-effective-target tls } */
/* { dg-require-effective-target code_6_gottpoff_reloc } */
/* { dg-options "-save-temps -std=gnu17 -mapxf -O3 -w" } */
-
+/* The testcase is fragile: it's supposed to check the compiler's
+   ability to generate a code_6_gottpoff_reloc instruction, but it
+   fails since there is a seg_prefixed memory usage
+   (r14-6242-gd564198f960a2f).  */
#include "apx-ndd-tls-1a.c"
-/* { dg-final { scan-assembler-times "addq\[ \t]+%r\[a-z0-9\]+, a@gottpoff\\(%rip\\), %r\[a-z0-9\]+" 1 { target lp64 } } } */
+/* { dg-final { scan-assembler-times "addq\[ \t]+%r\[a-z0-9\]+, a@gottpoff\\(%rip\\), %r\[a-z0-9\]+" 1 { xfail lp64 } } } */
diff --git a/gcc/testsuite/gfortran.dg/allocate_assumed_charlen_5.f90 b/gcc/testsuite/gfortran.dg/allocate_assumed_charlen_5.f90
new file mode 100644
index 0000000..bc75dbe
--- /dev/null
+++ b/gcc/testsuite/gfortran.dg/allocate_assumed_charlen_5.f90
@@ -0,0 +1,17 @@
+! { dg-do compile }
+! PR fortran/119338 - check F2003:C626
+
+module m
+ implicit none
+contains
+ subroutine sub (s, c)
+ character(len=*), allocatable, intent(out) :: s(:)
+ character(len=*), allocatable, intent(out) :: c
+ allocate(s(5)) ! OK
+ allocate(c) ! OK
+ allocate(character(len=*) :: s(5)) ! OK
+ allocate(character(len=*) :: c) ! OK
+ allocate(character(len=10) :: s(5)) ! { dg-error "shall be an asterisk" }
+ allocate(character(len=10) :: c) ! { dg-error "shall be an asterisk" }
+ end subroutine sub
+end module m
diff --git a/gcc/testsuite/gfortran.dg/deferred_character_18.f90 b/gcc/testsuite/gfortran.dg/deferred_character_18.f90
index 1b1457f..b1229c2 100644
--- a/gcc/testsuite/gfortran.dg/deferred_character_18.f90
+++ b/gcc/testsuite/gfortran.dg/deferred_character_18.f90
@@ -11,7 +11,8 @@ contains
character(*), allocatable, intent(out) :: str
! Note: Star ^ should have been a colon (:)
- allocate (character(n)::str)
+! allocate (character(n)::str) ! original invalid version from pr82367
+ allocate (character(*)::str) ! corrected (see F2003:C626 and pr119338)
end subroutine
diff --git a/include/ChangeLog b/include/ChangeLog
index 2f5b630..8d58040 100644
--- a/include/ChangeLog
+++ b/include/ChangeLog
@@ -1,3 +1,9 @@
+2025-03-18 Jose E. Marchesi <jose.marchesi@oracle.com>
+
+ * libiberty.h (ldirname): New function declaration.
+ (dos_ldirname): Likewise.
+ (unix_ldirname): Likewise.
+
2025-02-11 Roger Sayle <roger@nextmovesoftware.com>
* dwarf2.def (DW_CFA_AARCH64_negate_ra_state_with_pc): Define.
diff --git a/libcpp/po/ChangeLog b/libcpp/po/ChangeLog
index 8ed7beb..54968db 100644
--- a/libcpp/po/ChangeLog
+++ b/libcpp/po/ChangeLog
@@ -1,3 +1,7 @@
+2025-03-18 Joseph Myers <josmyers@redhat.com>
+
+ * fr.po, sv.po: Update.
+
2025-03-17 Joseph Myers <josmyers@redhat.com>
* be.po, ca.po, da.po, de.po, el.po, eo.po, es.po, fi.po, fr.po,
diff --git a/libiberty/ChangeLog b/libiberty/ChangeLog
index 0d27c12..74e4161 100644
--- a/libiberty/ChangeLog
+++ b/libiberty/ChangeLog
@@ -1,3 +1,12 @@
+2025-03-18 Jose E. Marchesi <jose.marchesi@oracle.com>
+
+ * ldirname.c: New file.
+ * Makefile.in (CFILES): Add ldirname.c.
+ (REQUIRED_OFILES): Add ldirname.$(objext).
+ (./ldirname.$(objext)): New rule.
+ * makefile.vms (OBJS): Add ldirname.obj.
+ * configure.com (FILES): Add ldirname.
+
2024-12-11 Matthieu Longo <matthieu.longo@arm.com>
* configure: Regenerate.
diff --git a/libphobos/ChangeLog b/libphobos/ChangeLog
index 872d059..d2308d9 100644
--- a/libphobos/ChangeLog
+++ b/libphobos/ChangeLog
@@ -1,3 +1,35 @@
+2025-03-18 Iain Buclaw <ibuclaw@gdcproject.org>
+
+ * src/MERGE: Merge upstream phobos 79cbde1ab.
+
+2025-03-18 Iain Buclaw <ibuclaw@gdcproject.org>
+
+ * src/MERGE: Merge upstream phobos cafe86453.
+
+2025-03-18 Iain Buclaw <ibuclaw@gdcproject.org>
+
+ * libdruntime/MERGE: Merge upstream druntime d2ee11364c.
+ * testsuite/libphobos.aa/test_aa.d: Add new test.
+ * testsuite/libphobos.betterc/test19933.d: Adjust imports.
+ * testsuite/libphobos.config/test22523.d: Likewise.
+ * testsuite/libphobos.exceptions/assert_fail.d: Adjust test.
+ * testsuite/libphobos.exceptions/chain.d: Adjust imports.
+ * testsuite/libphobos.exceptions/future_message.d: Likewise.
+ * testsuite/libphobos.exceptions/line_trace.d: Likewise.
+ * testsuite/libphobos.exceptions/long_backtrace_trunc.d: Likewise.
+ * testsuite/libphobos.exceptions/static_dtor.d: Likewise.
+ * testsuite/libphobos.gc/forkgc.d: Likewise.
+ * testsuite/libphobos.gc/precisegc.d: Likewise.
+ * testsuite/libphobos.gc/recoverfree.d: Likewise.
+ * testsuite/libphobos.hash/test_hash.d: Likewise.
+ * testsuite/libphobos.init_fini/custom_gc.d: Likewise.
+ * testsuite/libphobos.init_fini/thread_join.d: Likewise.
+ * testsuite/libphobos.thread/external_threads.d: Likewise.
+ * testsuite/libphobos.thread/fiber_guard_page.d: Likewise.
+ * testsuite/libphobos.thread/tlsgc_sections.d: Likewise.
+ * testsuite/libphobos.thread/tlsstack.d: Likewise.
+ * testsuite/libphobos.unittest/customhandler.d: Likewise.
+
2025-03-17 Iain Buclaw <ibuclaw@gdcproject.org>
* src/Makefile.am (PHOBOS_DSOURCES): Add etc/c/odbc/odbc32.d,
diff --git a/libphobos/src/MERGE b/libphobos/src/MERGE
index a5a685d..a4888fc 100644
--- a/libphobos/src/MERGE
+++ b/libphobos/src/MERGE
@@ -1,4 +1,4 @@
-0faae92d62bdc1cc1982f0e9c65830ece1677289
+79cbde1ab69bae9372f310d663edfc43166095e3
The first line of this file holds the git revision number of the last
merge done from the dlang/phobos repository.
diff --git a/libphobos/src/std/getopt.d b/libphobos/src/std/getopt.d
index 1a90722..fc5cdac 100644
--- a/libphobos/src/std/getopt.d
+++ b/libphobos/src/std/getopt.d
@@ -610,14 +610,14 @@ private template optionValidator(A...)
alias optionValidator = message;
}
-private void handleConversion(R)(string option, string value, R* receiver,
+private auto getoptTo(R)(string option, string value,
size_t idx, string file = __FILE__, size_t line = __LINE__)
{
import std.conv : to, ConvException;
import std.format : format;
try
{
- *receiver = to!(typeof(*receiver))(value);
+ return to!R(value);
}
catch (ConvException e)
{
@@ -876,12 +876,18 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
// (and potentially args[i + 1] too, but that comes later)
args = args[0 .. i] ~ args[i + 1 .. $];
- static if (is(typeof(*receiver) == bool))
+ static if (is(typeof(*receiver)))
+ alias Target = typeof(*receiver);
+ else
+ // delegate
+ alias Target = void;
+
+ static if (is(Target == bool))
{
if (val.length)
{
// parse '--b=true/false'
- handleConversion(option, val, receiver, i);
+ *receiver = getoptTo!(Target)(option, val, i);
}
else
{
@@ -894,23 +900,23 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
import std.exception : enforce;
// non-boolean option, which might include an argument
enum isCallbackWithLessThanTwoParameters =
- (is(typeof(receiver) == delegate) || is(typeof(*receiver) == function)) &&
+ (is(R == delegate) || is(Target == function)) &&
!is(typeof(receiver("", "")));
if (!isCallbackWithLessThanTwoParameters && !(val.length) && !incremental)
{
// Eat the next argument too. Check to make sure there's one
// to be eaten first, though.
enforce!GetOptException(i < args.length,
- "Missing value for argument " ~ a ~ ".");
+ "Missing value for argument " ~ a ~ ".");
val = args[i];
args = args[0 .. i] ~ args[i + 1 .. $];
}
- static if (is(typeof(*receiver) == enum) ||
- is(typeof(*receiver) == string))
+ static if (is(Target == enum) ||
+ is(Target == string))
{
- handleConversion(option, val, receiver, i);
+ *receiver = getoptTo!Target(option, val, i);
}
- else static if (is(typeof(*receiver) : real))
+ else static if (is(Target : real))
{
// numeric receiver
if (incremental)
@@ -919,16 +925,16 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
}
else
{
- handleConversion(option, val, receiver, i);
+ *receiver = getoptTo!Target(option, val, i);
}
}
- else static if (is(typeof(*receiver) == string))
+ else static if (is(Target == string))
{
// string receiver
- *receiver = to!(typeof(*receiver))(val);
+ *receiver = getoptTo!(Target)(option, val, i);
}
- else static if (is(typeof(receiver) == delegate) ||
- is(typeof(*receiver) == function))
+ else static if (is(R == delegate) ||
+ is(Target == function))
{
static if (is(typeof(receiver("", "")) : void))
{
@@ -952,29 +958,25 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
receiver();
}
}
- else static if (isArray!(typeof(*receiver)))
+ else static if (isArray!(Target))
{
// array receiver
import std.range : ElementEncodingType;
- alias E = ElementEncodingType!(typeof(*receiver));
+ alias E = ElementEncodingType!(Target);
if (arraySep == "")
{
- E tmp;
- handleConversion(option, val, &tmp, i);
- *receiver ~= tmp;
+ *receiver ~= getoptTo!E(option, val, i);
}
else
{
foreach (elem; val.splitter(arraySep))
{
- E tmp;
- handleConversion(option, elem, &tmp, i);
- *receiver ~= tmp;
+ *receiver ~= getoptTo!E(option, elem, i);
}
}
}
- else static if (isAssociativeArray!(typeof(*receiver)))
+ else static if (isAssociativeArray!(Target))
{
// hash receiver
alias K = typeof(receiver.keys[0]);
@@ -991,14 +993,7 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
~ to!string(assignChar) ~ "' in argument '" ~ input ~ "'.");
auto key = input[0 .. j];
auto value = input[j + 1 .. $];
-
- K k;
- handleConversion("", key, &k, 0);
-
- V v;
- handleConversion("", value, &v, 0);
-
- return tuple(k,v);
+ return tuple(getoptTo!K("", key, 0), getoptTo!V("", value, 0));
}
static void setHash(Range)(R receiver, Range range)
@@ -1013,7 +1008,7 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
setHash(receiver, val.splitter(arraySep));
}
else
- static assert(false, "getopt does not know how to handle the type " ~ typeof(receiver).stringof);
+ static assert(false, "getopt does not know how to handle the type " ~ R.stringof);
}
}
@@ -1099,6 +1094,18 @@ private bool handleOption(R)(string option, R receiver, ref string[] args,
assert(values == ["foo":0, "bar":1, "baz":2], to!string(values));
}
+// https://github.com/dlang/phobos/issues/10680
+@safe unittest
+{
+ arraySep = ",";
+ scope(exit) arraySep = "";
+ const(string)[] s;
+ string[] args = ["program.name", "-s", "a", "-s", "b", "-s", "c,d,e"];
+ getopt(args, "values|s", &s);
+ assert(s == ["a", "b", "c", "d", "e"]);
+}
+
+
/**
The option character (default '-').
diff --git a/libphobos/src/std/json.d b/libphobos/src/std/json.d
index 7182f6e..eb08de8 100644
--- a/libphobos/src/std/json.d
+++ b/libphobos/src/std/json.d
@@ -562,8 +562,7 @@ struct JSONValue
else static if (is(T : string))
{
type_tag = JSONType.string;
- string t = arg;
- () @trusted { store.str = t; }();
+ store = Store(str: arg);
}
// https://issues.dlang.org/show_bug.cgi?id=15884
else static if (isSomeString!T)
@@ -572,7 +571,7 @@ struct JSONValue
// FIXME: std.Array.Array(Range) is not deduced as 'pure'
() @trusted {
import std.utf : byUTF;
- store.str = cast(immutable)(arg.byUTF!char.array);
+ store = Store(str: cast(immutable)(arg.byUTF!char.array));
}();
}
else static if (is(T : bool))
@@ -582,17 +581,17 @@ struct JSONValue
else static if (is(T : ulong) && isUnsigned!T)
{
type_tag = JSONType.uinteger;
- store.uinteger = arg;
+ store = Store(uinteger: arg);
}
else static if (is(T : long))
{
type_tag = JSONType.integer;
- store.integer = arg;
+ store = Store(integer: arg);
}
else static if (isFloatingPoint!T)
{
type_tag = JSONType.float_;
- store.floating = arg;
+ store = Store(floating: arg);
}
else static if (is(T : Value[Key], Key, Value))
{
@@ -600,45 +599,34 @@ struct JSONValue
type_tag = JSONType.object;
static if (is(Value : JSONValue))
{
- JSONValue[string] t = arg;
- () @trusted {
- store.object.isOrdered = false;
- store.object.unordered = t;
- }();
+ store = Store(object: Store.Object(false, unordered: arg));
}
else
{
JSONValue[string] aa;
foreach (key, value; arg)
aa[key] = JSONValue(value);
- () @trusted {
- store.object.isOrdered = false;
- store.object.unordered = aa;
- }();
+ store = Store(object: Store.Object(false, unordered: aa));
}
}
else static if (is(T : OrderedObjectMember[]))
{
type_tag = JSONType.object;
- () @trusted {
- store.object.isOrdered = true;
- store.object.ordered = arg;
- }();
+ store = Store(object: Store.Object(true, ordered: arg));
}
else static if (isArray!T)
{
type_tag = JSONType.array;
static if (is(ElementEncodingType!T : JSONValue))
{
- JSONValue[] t = arg;
- () @trusted { store.array = t; }();
+ store = Store(array: arg);
}
else
{
JSONValue[] new_arg = new JSONValue[arg.length];
foreach (i, e; arg)
new_arg[i] = JSONValue(e);
- () @trusted { store.array = new_arg; }();
+ store = Store(array: new_arg);
}
}
else static if (is(T : JSONValue))
@@ -658,14 +646,14 @@ struct JSONValue
type_tag = JSONType.array;
static if (is(ElementEncodingType!T : JSONValue))
{
- store.array = arg;
+ store = Store(array: arg);
}
else
{
JSONValue[] new_arg = new JSONValue[arg.length];
foreach (i, e; arg)
new_arg[i] = JSONValue(e);
- store.array = new_arg;
+ store = Store(array: new_arg);
}
}
@@ -1616,13 +1604,13 @@ if (isSomeFiniteCharInputRange!T)
if (isFloat)
{
value.type_tag = JSONType.float_;
- value.store.floating = parse!double(data);
+ value.store = JSONValue.Store(floating: parse!double(data));
}
else
{
if (isNegative)
{
- value.store.integer = parse!long(data);
+ value.store = JSONValue.Store(integer: parse!long(data));
value.type_tag = JSONType.integer;
}
else
@@ -1631,12 +1619,12 @@ if (isSomeFiniteCharInputRange!T)
ulong u = parse!ulong(data);
if (u & (1UL << 63))
{
- value.store.uinteger = u;
+ value.store = JSONValue.Store(uinteger: u);
value.type_tag = JSONType.uinteger;
}
else
{
- value.store.integer = u;
+ value.store = JSONValue.Store(integer: u);
value.type_tag = JSONType.integer;
}
}
diff --git a/libstdc++-v3/ChangeLog b/libstdc++-v3/ChangeLog
index 224b2ad..7518728 100644
--- a/libstdc++-v3/ChangeLog
+++ b/libstdc++-v3/ChangeLog
@@ -1,3 +1,68 @@
+2025-03-18 Tomasz Kamiński <tkaminsk@redhat.com>
+
+ PR libstdc++/111055
+ * include/bits/unordered_map.h (unordered_map): Define from_range
+ constructors and insert_range member.
+ (unordered_multimap): Likewise.
+ * testsuite/23_containers/unordered_multimap/cons/from_range.cc:
+ New test.
+ * testsuite/23_containers/unordered_multimap/modifiers/insert_range.cc:
+ New test.
+ * testsuite/23_containers/unordered_map/cons/from_range.cc:
+ New test.
+ * testsuite/23_containers/unordered_map/modifiers/insert_range.cc:
+ New test.
+
+2025-03-18 Tomasz Kamiński <tkaminsk@redhat.com>
+
+ PR libstdc++/111055
+ * include/bits/hashtable.h (_M_rehash_insert)
+	(_M_insert_range_multi): Extract rehashing for range insertion
+	into a separate function.
+ * include/bits/unordered_set.h (unordered_set): Define from_range
+ constructors and insert_range member.
+	(unordered_multiset): Likewise.
+ * testsuite/23_containers/unordered_multiset/cons/from_range.cc:
+ New test.
+ * testsuite/23_containers/unordered_multiset/modifiers/insert_range.cc:
+ New test.
+ * testsuite/23_containers/unordered_set/cons/from_range.cc:
+ New test.
+ * testsuite/23_containers/unordered_set/modifiers/insert_range.cc:
+ New test.
+
+2025-03-18 Tomasz Kamiński <tkaminsk@redhat.com>
+
+ PR libstdc++/111055
+	* include/bits/stl_multiset.h: (insert_range)
+ (multiset(from_range_t, _Rg&&, const _Compare&, const _Alloc&))
+ (multiset(from_range_t, _Rg&&, const _Alloc&)): Define.
+ * include/bits/stl_set.h: (set(from_range_t, _Rg&&, const _Alloc&))
+ (set(from_range_t, _Rg&&, const _Compare&, const _Alloc&), insert_range):
+ Define.
+ * testsuite/23_containers/multiset/cons/from_range.cc: New test.
+ * testsuite/23_containers/multiset/modifiers/insert/insert_range.cc:
+ New test.
+ * testsuite/23_containers/set/cons/from_range.cc: New test.
+ * testsuite/23_containers/set/modifiers/insert/insert_range.cc:
+ New test.
+
+2025-03-18 Tomasz Kamiński <tkaminsk@redhat.com>
+
+ PR libstdc++/111055
+ * include/bits/ranges_base.h (__detail::__range_to_alloc_type):
+ Define.
+	* include/bits/stl_multimap.h: (insert_range)
+ (multimap(from_range_t, _Rg&&, const _Compare&, const _Alloc&))
+ (multimap(from_range_t, _Rg&&, const _Alloc&)): Define.
+	* include/bits/stl_map.h: (map(from_range_t, _Rg&&, const _Alloc&))
+ (map(from_range_t, _Rg&&, const _Compare&, const _Alloc&), insert_range):
+ Define.
+ * testsuite/23_containers/multimap/cons/from_range.cc: New test.
+ * testsuite/23_containers/multimap/modifiers/insert/insert_range.cc: New test.
+ * testsuite/23_containers/map/cons/from_range.cc: New test.
+ * testsuite/23_containers/map/modifiers/insert/insert_range.cc: New test.
+
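The four entries above add the C++23 "ranges to containers" interface (from_range constructors and the insert_range member) to the ordered and unordered map/set family. A minimal usage sketch, compiled with -std=c++23; the container choice and variable names here are illustrative only:

#include <ranges>         // std::from_range
#include <unordered_map>
#include <utility>
#include <vector>

int main()
{
  std::vector<std::pair<int, int>> a{{1, 10}, {2, 20}};
  std::vector<std::pair<int, int>> b{{3, 30}, {4, 40}};

  // Construct the container directly from a range (new from_range constructor).
  std::unordered_map<int, int> m(std::from_range, a);

  // Splice a second range into the existing container (new insert_range member).
  m.insert_range(b);

  return m.size() == 4 ? 0 : 1;
}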
2025-03-14 Patrick Palka <ppalka@redhat.com>
PR libstdc++/119282