| 1 | /* C++ modules. Experimental! |
| 2 | Copyright (C) 2017-2026 Free Software Foundation, Inc. |
| 3 | Written by Nathan Sidwell <nathan@acm.org> while at FaceBook |
| 4 | |
| 5 | This file is part of GCC. |
| 6 | |
| 7 | GCC is free software; you can redistribute it and/or modify it |
| 8 | under the terms of the GNU General Public License as published by |
| 9 | the Free Software Foundation; either version 3, or (at your option) |
| 10 | any later version. |
| 11 | |
| 12 | GCC is distributed in the hope that it will be useful, but |
| 13 | WITHOUT ANY WARRANTY; without even the implied warranty of |
| 14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| 15 | General Public License for more details. |
| 16 | |
| 17 | You should have received a copy of the GNU General Public License |
| 18 | along with GCC; see the file COPYING3. If not see |
| 19 | <http://www.gnu.org/licenses/>. */ |
| 20 | |
/* Comments in this file have a non-negligible chance of being wrong
   or at least inaccurate.  Due to (a) my misunderstanding, (b)
   ambiguities that I have interpreted differently to original intent
   (c) changes in the specification, (d) my poor wording, (e) source
   changes.  */
| 26 | |
| 27 | /* (Incomplete) Design Notes |
| 28 | |
| 29 | A hash table contains all module names. Imported modules are |
| 30 | present in a modules array, which by construction places an |
| 31 | import's dependencies before the import itself. The single |
| 32 | exception is the current TU, which always occupies slot zero (even |
| 33 | when it is not a module). |
| 34 | |
| 35 | Imported decls occupy an entity_ary, an array of binding_slots, indexed |
| 36 | by importing module and index within that module. A flat index is |
| 37 | used, as each module reserves a contiguous range of indices. |
| 38 | Initially each slot indicates the CMI section containing the |
| 39 | streamed decl. When the decl is imported it will point to the decl |
| 40 | itself. |
| 41 | |
   Additionally each imported decl is mapped in the entity_map via its
   DECL_UID to the flat index in the entity_ary.  Thus we can locate
   the index for any imported decl by using this map and then
   de-flattening the index via a binary search of the module vector.
   Cross-module references are by (remapped) module number and
   module-local index.
| 48 | |
| 49 | Each importable DECL contains several flags. The simple set are |
| 50 | DECL_MODULE_EXPORT_P, DECL_MODULE_PURVIEW_P, DECL_MODULE_ATTACH_P |
| 51 | and DECL_MODULE_IMPORT_P. The first indicates whether it is |
| 52 | exported, the second whether it is in module or header-unit |
| 53 | purview. The third indicates it is attached to the named module in |
| 54 | whose purview it resides and the fourth indicates whether it was an |
| 55 | import into this TU or not. DECL_MODULE_ATTACH_P will be false for |
| 56 | all decls in a header-unit, and for those in a named module inside |
| 57 | a linkage declaration. |
| 58 | |
| 59 | The more detailed flags are DECL_MODULE_PARTITION_P, |
| 60 | DECL_MODULE_ENTITY_P. The first is set in a primary interface unit |
| 61 | on decls that were read from module partitions (these will have |
| 62 | DECL_MODULE_IMPORT_P set too). Such decls will be streamed out to |
| 63 | the primary's CMI. DECL_MODULE_ENTITY_P is set when an entity is |
| 64 | imported, even if it matched a non-imported entity. Such a decl |
| 65 | will not have DECL_MODULE_IMPORT_P set, even though it has an entry |
| 66 | in the entity map and array. |
| 67 | |
| 68 | Header units are module-like. |
| 69 | |
| 70 | For namespace-scope lookup, the decls for a particular module are |
| 71 | held located in a sparse array hanging off the binding of the name. |
| 72 | This is partitioned into two: a few fixed slots at the start |
| 73 | followed by the sparse slots afterwards. By construction we only |
| 74 | need to append new slots to the end -- there is never a need to |
| 75 | insert in the middle. The fixed slots are MODULE_SLOT_CURRENT for |
| 76 | the current TU (regardless of whether it is a module or not), |
| 77 | MODULE_SLOT_GLOBAL and MODULE_SLOT_PARTITION. These latter two |
| 78 | slots are used for merging entities across the global module and |
| 79 | module partitions respectively. MODULE_SLOT_PARTITION is only |
| 80 | present in a module. Neither of those two slots is searched during |
| 81 | name lookup -- they are internal use only. This vector is created |
| 82 | lazily once we require it, if there is only a declaration from the |
| 83 | current TU, a regular binding is present. It is converted on |
| 84 | demand. |
| 85 | |
| 86 | OPTIMIZATION: Outside of the current TU, we only need ADL to work. |
| 87 | We could optimize regular lookup for the current TU by glomming all |
| 88 | the visible decls on its slot. Perhaps wait until design is a |
| 89 | little more settled though. |
| 90 | |
| 91 | There is only one instance of each extern-linkage namespace. It |
| 92 | appears in every module slot that makes it visible. It also |
| 93 | appears in MODULE_SLOT_GLOBAL. (It is an ODR violation if they |
| 94 | collide with some other global module entity.) We also have an |
| 95 | optimization that shares the slot for adjacent modules that declare |
| 96 | the same such namespace. |
| 97 | |
| 98 | A module interface compilation produces a Compiled Module Interface |
| 99 | (CMI). The format used is Encapsulated Lazy Records Of Numbered |
| 100 | Declarations, which is essentially ELF's section encapsulation. (As |
| 101 | all good nerds are aware, Elrond is half Elf.) Some sections are |
| 102 | named, and contain information about the module as a whole (indices |
| 103 | etc), and other sections are referenced by number. Although I |
| 104 | don't defend against actively hostile CMIs, there is some |
| 105 | checksumming involved to verify data integrity. When dumping out |
| 106 | an interface, we generate a graph of all the |
| 107 | independently-redeclarable DECLS that are needed, and the decls |
| 108 | they reference. From that we determine the strongly connected |
| 109 | components (SCC) within this TU. Each SCC is dumped to a separate |
| 110 | numbered section of the CMI. We generate a binding table section, |
| 111 | mapping each namespace&name to a defining section. This allows |
| 112 | lazy loading. |
| 113 | |
| 114 | Lazy loading employs mmap to map a read-only image of the CMI. |
| 115 | It thus only occupies address space and is paged in on demand, |
| 116 | backed by the CMI file itself. If mmap is unavailable, regular |
| 117 | FILEIO is used. Also, there's a bespoke ELF reader/writer here, |
| 118 | which implements just the section table and sections (including |
| 119 | string sections) of a 32-bit ELF in host byte-order. You can of |
| 120 | course inspect it with readelf. I figured 32-bit is sufficient, |
| 121 | for a single module. I detect running out of section numbers, but |
| 122 | do not implement the ELF overflow mechanism. At least you'll get |
| 123 | an error if that happens. |
| 124 | |
| 125 | We do not separate declarations and definitions. My guess is that |
| 126 | if you refer to the declaration, you'll also need the definition |
| 127 | (template body, inline function, class definition etc). But this |
| 128 | does mean we can get larger SCCs than if we separated them. It is |
| 129 | unclear whether this is a win or not. |
| 130 | |
| 131 | Notice that we embed section indices into the contents of other |
| 132 | sections. Thus random manipulation of the CMI file by ELF tools |
| 133 | may well break it. The kosher way would probably be to introduce |
| 134 | indirection via section symbols, but that would require defining a |
| 135 | relocation type. |
| 136 | |
| 137 | Notice that lazy loading of one module's decls can cause lazy |
| 138 | loading of other decls in the same or another module. Clearly we |
| 139 | want to avoid loops. In a correct program there can be no loops in |
| 140 | the module dependency graph, and the above-mentioned SCC algorithm |
| 141 | places all intra-module circular dependencies in the same SCC. It |
| 142 | also orders the SCCs wrt each other, so dependent SCCs come first. |
| 143 | As we load dependent modules first, we know there can be no |
| 144 | reference to a higher-numbered module, and because we write out |
| 145 | dependent SCCs first, likewise for SCCs within the module. This |
| 146 | allows us to immediately detect broken references. When loading, |
| 147 | we must ensure the rest of the compiler doesn't cause some |
| 148 | unconnected load to occur (for instance, instantiate a template). |
| 149 | |
| 150 | Classes used: |
| 151 | |
| 152 | dumper - logger |
| 153 | |
| 154 | data - buffer |
| 155 | |
| 156 | bytes_in : data - scalar reader |
| 157 | bytes_out : data - scalar writer |
| 158 | |
| 159 | bytes_in::bits_in - bit stream reader |
| 160 | bytes_out::bits_out - bit stream writer |
| 161 | |
| 162 | elf - ELROND format |
| 163 | elf_in : elf - ELROND reader |
| 164 | elf_out : elf - ELROND writer |
| 165 | |
| 166 | trees_in : bytes_in - tree reader |
| 167 | trees_out : bytes_out - tree writer |
| 168 | |
| 169 | depset - dependency set |
| 170 | depset::hash - hash table of depsets |
| 171 | depset::tarjan - SCC determinator |
| 172 | |
| 173 | uidset<T> - set T's related to a UID |
| 174 | uidset<T>::hash hash table of uidset<T> |
| 175 | |
| 176 | loc_spans - location map data |
| 177 | |
| 178 | module_state - module object |
| 179 | |
| 180 | slurping - data needed during loading |
| 181 | |
| 182 | macro_import - imported macro data |
| 183 | macro_export - exported macro data |
| 184 | |
| 185 | The ELROND objects use mmap, for both reading and writing. If mmap |
| 186 | is unavailable, fileno IO is used to read and write blocks of data. |
| 187 | |
| 188 | The mapper object uses fileno IO to communicate with the server or |
| 189 | program. */ |
| 190 | |
/* In experimental (trunk) sources, MODULE_VERSION is a #define passed
   in from the Makefile.  It records the modification date of the
   source directory -- that's the only way to stay sane.  In release
   sources, we (plan to) use the compiler's major.minor versioning.
   While the format might not change between minor versions, it
   seems simplest to tie the two together.  There's no concept of
   inter-version compatibility.  */
| 198 | #define IS_EXPERIMENTAL(V) ((V) >= (1U << 20)) |
| 199 | #define MODULE_MAJOR(V) ((V) / 10000) |
| 200 | #define MODULE_MINOR(V) ((V) % 10000) |
| 201 | #define EXPERIMENT(A,B) (IS_EXPERIMENTAL (MODULE_VERSION) ? (A) : (B)) |
| 202 | #ifndef MODULE_VERSION |
| 203 | #include "bversion.h" |
| 204 | #define MODULE_VERSION (BUILDING_GCC_MAJOR * 10000U + BUILDING_GCC_MINOR) |
| 205 | #elif !IS_EXPERIMENTAL (MODULE_VERSION) |
| 206 | #error "This is not the version I was looking for." |
| 207 | #endif |
| 208 | |
| 209 | #define _DEFAULT_SOURCE 1 /* To get TZ field of struct tm, if available. */ |
| 210 | #include "config.h" |
| 211 | #define INCLUDE_STRING |
| 212 | #define INCLUDE_VECTOR |
| 213 | #include "system.h" |
| 214 | #include "coretypes.h" |
| 215 | #include "cp-tree.h" |
| 216 | #include "timevar.h" |
| 217 | #include "stringpool.h" |
| 218 | #include "dumpfile.h" |
| 219 | #include "bitmap.h" |
| 220 | #include "cgraph.h" |
| 221 | #include "varasm.h" |
| 222 | #include "tree-iterator.h" |
| 223 | #include "cpplib.h" |
| 224 | #include "mkdeps.h" |
| 225 | #include "incpath.h" |
| 226 | #include "libiberty.h" |
| 227 | #include "stor-layout.h" |
| 228 | #include "version.h" |
| 229 | #include "tree-diagnostic.h" |
| 230 | #include "toplev.h" |
| 231 | #include "opts.h" |
| 232 | #include "attribs.h" |
| 233 | #include "intl.h" |
| 234 | #include "langhooks.h" |
| 235 | #include "contracts.h" |
| 236 | /* This TU doesn't need or want to see the networking. */ |
| 237 | #define CODY_NETWORKING 0 |
| 238 | #include "mapper-client.h" |
| 239 | #include <zlib.h> // for crc32, crc32_combine |
| 240 | |
| 241 | #if 0 // 1 for testing no mmap |
| 242 | #define MAPPED_READING 0 |
| 243 | #define MAPPED_WRITING 0 |
| 244 | #else |
| 245 | #if HAVE_MMAP_FILE && HAVE_MUNMAP && HAVE_MSYNC |
| 246 | /* mmap, munmap, msync. */ |
| 247 | #define MAPPED_READING 1 |
| 248 | #if HAVE_SYSCONF && defined (_SC_PAGE_SIZE) |
| 249 | /* sysconf (_SC_PAGE_SIZE), ftruncate */ |
| 250 | /* posix_fallocate used if available. */ |
| 251 | #define MAPPED_WRITING 1 |
| 252 | #else |
| 253 | #define MAPPED_WRITING 0 |
| 254 | #endif |
| 255 | #else |
| 256 | #define MAPPED_READING 0 |
| 257 | #define MAPPED_WRITING 0 |
| 258 | #endif |
| 259 | #endif |
| 260 | |
| 261 | /* Some open(2) flag differences, what a colourful world it is! */ |
| 262 | #if defined (O_CLOEXEC) |
| 263 | // OK |
| 264 | #elif defined (_O_NOINHERIT) |
| 265 | /* Windows' _O_NOINHERIT matches O_CLOEXEC flag */ |
| 266 | #define O_CLOEXEC _O_NOINHERIT |
| 267 | #else |
| 268 | #define O_CLOEXEC 0 |
| 269 | #endif |
| 270 | #if defined (O_BINARY) |
| 271 | // Ok? |
| 272 | #elif defined (_O_BINARY) |
| 273 | /* Windows' open(2) call defaults to text! */ |
| 274 | #define O_BINARY _O_BINARY |
| 275 | #else |
| 276 | #define O_BINARY 0 |
| 277 | #endif |
| 278 | |
| 279 | static inline cpp_hashnode *cpp_node (tree id) |
| 280 | { |
| 281 | return CPP_HASHNODE (GCC_IDENT_TO_HT_IDENT (id)); |
| 282 | } |
| 283 | |
/* Get the GCC identifier tree node for preprocessor hash NODE.  */

static inline tree identifier (const cpp_hashnode *node)
{
  /* HT_NODE() expands to node->ident that HT_IDENT_TO_GCC_IDENT()
     then subtracts a nonzero constant, deriving a pointer to
     a different member than ident.  That's strictly undefined
     and detected by -Warray-bounds.  Suppress it.  See PR 101372.  */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Warray-bounds"
  return HT_IDENT_TO_GCC_IDENT (HT_NODE (const_cast<cpp_hashnode *> (node)));
#pragma GCC diagnostic pop
}
| 295 | |
| 296 | /* Id for dumping module information. */ |
| 297 | int module_dump_id; |
| 298 | |
| 299 | /* We have a special module owner. */ |
| 300 | #define MODULE_UNKNOWN (~0U) /* Not yet known. */ |
| 301 | |
| 302 | /* Prefix for section names. */ |
| 303 | #define MOD_SNAME_PFX ".gnu.c++" |
| 304 | |
| 305 | /* Format a version for user consumption. */ |
| 306 | |
| 307 | typedef char verstr_t[32]; |
| 308 | static void |
| 309 | version2string (unsigned version, verstr_t &out) |
| 310 | { |
| 311 | unsigned major = MODULE_MAJOR (version); |
| 312 | unsigned minor = MODULE_MINOR (version); |
| 313 | |
| 314 | if (IS_EXPERIMENTAL (version)) |
| 315 | sprintf (s: out, format: "%04u/%02u/%02u-%02u:%02u%s" , |
| 316 | 2000 + major / 10000, (major / 100) % 100, (major % 100), |
| 317 | minor / 100, minor % 100, |
| 318 | EXPERIMENT ("" , " (experimental)" )); |
| 319 | else |
| 320 | sprintf (s: out, format: "%u.%u" , major, minor); |
| 321 | } |
| 322 | |
| 323 | /* Include files to note translation for. */ |
| 324 | static vec<const char *, va_heap, vl_embed> *note_includes; |
| 325 | |
/* Modules to note CMI pathnames.  */
| 327 | static vec<const char *, va_heap, vl_embed> *note_cmis; |
| 328 | |
/* Traits to hash an arbitrary pointer.  Entries are not deletable,
   and removal is a noop (removal needed upon destruction).  */
template <typename T>
struct nodel_ptr_hash : pointer_hash<T>, typed_noop_remove <T *> {
  /* Nothing is deletable.  Everything is insertable.  */
  static bool is_deleted (T *) { return false; }
  /* Deletion is unsupported; reaching here indicates a bug.  */
  static void mark_deleted (T *) { gcc_unreachable (); }
};
| 337 | |
| 338 | /* Map from pointer to signed integer. */ |
| 339 | typedef simple_hashmap_traits<nodel_ptr_hash<void>, int> ptr_int_traits; |
| 340 | typedef hash_map<void *,signed,ptr_int_traits> ptr_int_hash_map; |
| 341 | |
| 342 | /********************************************************************/ |
/* Basic streaming & ELF.  Serialization is usually via mmap.  For
   writing we slide a buffer over the output file, syncing it
   appropriately.  For reading we simply map the whole file (as a
   file-backed read-only map -- it's just address space, leaving the
   OS pager to deal with getting the data to us).  Some buffers need
   to be more conventional malloc'd contents.  */
| 349 | |
/* Variable length buffer.  */

namespace {

/* One, in the location-map index type.  */
constexpr line_map_uint_t loc_one = 1;

class data {
public:
  /* Memory-provision strategy; overridable by derived allocators.  */
  class allocator {
  public:
    /* Tools tend to moan if the dtor's not virtual.  */
    virtual ~allocator () {}

  public:
    /* Resize OBJ's buffer (and its accounting) to at least NEEDED.  */
    void grow (data &obj, unsigned needed, bool exact);
    /* Release OBJ's buffer and reset its accounting.  */
    void shrink (data &obj);

  public:
    /* Raw memory operations, overridable for non-malloc backing.  */
    virtual char *grow (char *ptr, unsigned needed);
    virtual void shrink (char *ptr);
  };

public:
  char *buffer;		/* Buffer being transferred.  */
  /* Although size_t would be the usual size, we know we never get
     more than 4GB of buffer -- because that's the limit of the
     encapsulation format.  And if you need bigger imports, you're
     doing it wrong.  */
  unsigned size;	/* Allocated size of buffer.  */
  unsigned pos;		/* Position in buffer.  */

public:
  data ()
    :buffer (NULL), size (0), pos (0)
  {
  }
  ~data ()
  {
    /* Make sure the derived and/or using class know what they're
       doing.  */
    gcc_checking_assert (!buffer);
  }

protected:
  /* Consume COUNT bytes, returning a pointer to their start, or NULL
     if that would run past the end of the buffer.  */
  char *use (unsigned count)
  {
    if (size < pos + count)
      return NULL;
    char *res = &buffer[pos];
    pos += count;
    return res;
  }

  /* CRC32 of the first bytes, excluding the stored CRC itself.  */
  unsigned calc_crc (unsigned) const;

public:
  /* Back up over COUNT bytes consumed prematurely by use ().  */
  void unuse (unsigned count)
  {
    pos -= count;
  }

public:
  static allocator simple_memory;
};
} // anon namespace
| 415 | |
| 416 | /* The simple data allocator. */ |
| 417 | data::allocator data::simple_memory; |
| 418 | |
| 419 | /* Grow buffer to at least size NEEDED. */ |
| 420 | |
| 421 | void |
| 422 | data::allocator::grow (data &obj, unsigned needed, bool exact) |
| 423 | { |
| 424 | gcc_checking_assert (needed ? needed > obj.size : !obj.size); |
| 425 | if (!needed) |
| 426 | /* Pick a default size. */ |
| 427 | needed = EXPERIMENT (100, 1000); |
| 428 | |
| 429 | if (!exact) |
| 430 | needed *= 2; |
| 431 | obj.buffer = grow (ptr: obj.buffer, needed); |
| 432 | if (obj.buffer) |
| 433 | obj.size = needed; |
| 434 | else |
| 435 | obj.pos = obj.size = 0; |
| 436 | } |
| 437 | |
| 438 | /* Free a buffer. */ |
| 439 | |
| 440 | void |
| 441 | data::allocator::shrink (data &obj) |
| 442 | { |
| 443 | shrink (ptr: obj.buffer); |
| 444 | obj.buffer = NULL; |
| 445 | obj.size = 0; |
| 446 | } |
| 447 | |
/* Default raw growth: resize PTR to NEEDED bytes via xrealloc.  */

char *
data::allocator::grow (char *ptr, unsigned needed)
{
  return XRESIZEVAR (char, ptr, needed);
}
| 453 | |
/* Default raw release: free PTR.  */

void
data::allocator::shrink (char *ptr)
{
  XDELETEVEC (ptr);
}
| 459 | |
| 460 | /* Calculate the crc32 of the buffer. Note the CRC is stored in the |
| 461 | first 4 bytes, so don't include them. */ |
| 462 | |
| 463 | unsigned |
| 464 | data::calc_crc (unsigned l) const |
| 465 | { |
| 466 | return crc32 (crc: 0, buf: (unsigned char *)buffer + 4, len: l - 4); |
| 467 | } |
| 468 | |
class elf_in;

/* Byte stream reader.  */

namespace {
class bytes_in : public data {
  typedef data parent;

protected:
  bool overrun;  /* Sticky read-too-much flag.  */

public:
  bytes_in ()
    : parent (), overrun (false)
  {
  }
  ~bytes_in ()
  {
  }

public:
  /* Begin reading a named section.  */
  bool begin (location_t loc, elf_in *src, const char *name);
  /* Begin reading a numbered section with optional name.  */
  bool begin (location_t loc, elf_in *src, unsigned, const char * = NULL);
  /* Complete reading a buffer.  Propagate errors and return true on
     success.  */
  bool end (elf_in *src);
  /* Return true if there is unread data.  */
  bool more_p () const
  {
    return pos != size;
  }

public:
  /* Start reading at OFFSET.  Out-of-range offsets set overrun.  */
  void random_access (unsigned offset)
  {
    if (offset > size)
      set_overrun ();
    pos = offset;
  }

public:
  /* Skip to the next multiple of BOUNDARY (a power of two).  */
  void align (unsigned boundary)
  {
    if (unsigned pad = pos & (boundary - 1))
      read (boundary - pad);
  }

public:
  /* Consume COUNT bytes, returning their start.  Sets overrun (and
     returns NULL) if they are not available.  */
  const char *read (unsigned count)
  {
    char *ptr = use (count);
    if (!ptr)
      set_overrun ();
    return ptr;
  }

public:
  bool check_crc () const;
  /* We store the CRC in the first 4 bytes, using host endianness.  */
  unsigned get_crc () const
  {
    return *(const unsigned *)&buffer[0];
  }

public:
  /* Manipulate the overrun flag.  */
  bool get_overrun () const
  {
    return overrun;
  }
  void set_overrun ()
  {
    overrun = true;
  }

public:
  unsigned u32 ();  	/* Read uncompressed integer.  */

public:
  int c () ATTRIBUTE_UNUSED;	/* Read a char.  */
  int i ();			/* Read a signed int.  */
  unsigned u ();		/* Read an unsigned int.  */
  size_t z ();			/* Read a size_t.  */
  location_t loc ();		/* Read a location_t.  */
  HOST_WIDE_INT wi ();		/* Read a HOST_WIDE_INT.  */
  unsigned HOST_WIDE_INT wu (); /* Read an unsigned HOST_WIDE_INT.  */
  const char *str (size_t * = NULL);	/* Read a string.  */
  const void *buf (size_t);	/* Read a fixed-length buffer.  */
  cpp_hashnode *cpp_node ();	/* Read a cpp node.  */

  struct bits_in;
  bits_in stream_bits ();
};
} // anon namespace
| 566 | |
| 567 | /* Verify the buffer's CRC is correct. */ |
| 568 | |
| 569 | bool |
| 570 | bytes_in::check_crc () const |
| 571 | { |
| 572 | if (size < 4) |
| 573 | return false; |
| 574 | |
| 575 | unsigned c_crc = calc_crc (l: size); |
| 576 | if (c_crc != get_crc ()) |
| 577 | return false; |
| 578 | |
| 579 | return true; |
| 580 | } |
| 581 | |
class elf_out;

/* Byte stream writer.  */

namespace {
class bytes_out : public data {
  typedef data parent;

public:
  allocator *memory;	/* Obtainer of memory.  */

public:
  bytes_out (allocator *memory)
    : parent (), memory (memory)
  {
  }
  ~bytes_out ()
  {
  }

public:
  /* True when we have an allocator to obtain memory from.  */
  bool streaming_p () const
  {
    return memory != NULL;
  }

public:
  void set_crc (unsigned *crc_ptr);

public:
  /* Begin writing, maybe reserve space for CRC.  */
  void begin (bool need_crc = true);
  /* Finish writing.  Spill to section by number.  */
  unsigned end (elf_out *, unsigned, unsigned *crc_ptr = NULL);

public:
  /* Advance to the next multiple of BOUNDARY (a power of two).  */
  void align (unsigned boundary)
  {
    if (unsigned pad = pos & (boundary - 1))
      write (boundary - pad);
  }

public:
  /* Reserve COUNT bytes, growing the buffer as necessary, and return
     a pointer for the caller to fill in.  */
  char *write (unsigned count, bool exact = false)
  {
    if (size < pos + count)
      memory->grow (*this, pos + count, exact);
    return use (count);
  }

public:
  void u32 (unsigned);	/* Write uncompressed integer.  */

public:
  void c (unsigned char) ATTRIBUTE_UNUSED;  /* Write unsigned char.  */
  void i (int);		/* Write signed int.  */
  void u (unsigned);	/* Write unsigned int.  */
  void z (size_t s);	/* Write size_t.  */
  void loc (location_t);	/* Write location_t.  */
  void wi (HOST_WIDE_INT);	/* Write HOST_WIDE_INT.  */
  void wu (unsigned HOST_WIDE_INT);  /* Write unsigned HOST_WIDE_INT.  */
  void str (const char *ptr)
  {
    str (ptr, strlen (ptr));
  }
  void cpp_node (const cpp_hashnode *node)
  {
    str ((const char *)NODE_NAME (node), NODE_LEN (node));
  }
  void str (const char *, size_t);  /* Write string of known length.  */
  void buf (const void *, size_t);  /* Write fixed length buffer.  */
  void *buf (size_t);		/* Create a writable buffer */

  struct bits_out;
  bits_out stream_bits ();

public:
  /* Format a NUL-terminated raw string.  */
  void printf (const char *, ...) ATTRIBUTE_PRINTF_2;
  void print_time (const char *, const tm *, const char *);

public:
  /* Dump instrumentation.  */
  static void instrument ();

protected:
  /* Instrumentation.  */
  static unsigned spans[4];
  static unsigned lengths[4];
};
} // anon namespace
| 673 | |
| 674 | /* Finish bit packet. Rewind the bytes not used. */ |
| 675 | |
| 676 | static unsigned |
| 677 | bit_flush (data& bits, uint32_t& bit_val, unsigned& bit_pos) |
| 678 | { |
| 679 | gcc_assert (bit_pos); |
| 680 | unsigned bytes = (bit_pos + 7) / 8; |
| 681 | bits.unuse (count: 4 - bytes); |
| 682 | bit_pos = 0; |
| 683 | bit_val = 0; |
| 684 | return bytes; |
| 685 | } |
| 686 | |
| 687 | /* Bit stream reader (RAII-enabled). Bools are packed into bytes. You |
| 688 | cannot mix bools and non-bools. Use bflush to flush the current stream |
| 689 | of bools on demand. Upon destruction bflush is called. |
| 690 | |
| 691 | When reading, we don't know how many bools we'll read in. So read |
| 692 | 4 bytes-worth, and then rewind when flushing if we didn't need them |
| 693 | all. You can't have a block of bools closer than 4 bytes to the |
| 694 | end of the buffer. |
| 695 | |
| 696 | Both bits_in and bits_out maintain the necessary state for bit packing, |
| 697 | and since these objects are locally constructed the compiler can more |
| 698 | easily track their state across consecutive reads/writes and optimize |
| 699 | away redundant buffering checks. */ |
| 700 | |
| 701 | struct bytes_in::bits_in { |
| 702 | bytes_in& in; |
| 703 | uint32_t bit_val = 0; |
| 704 | unsigned bit_pos = 0; |
| 705 | |
| 706 | bits_in (bytes_in& in) |
| 707 | : in (in) |
| 708 | { } |
| 709 | |
| 710 | ~bits_in () |
| 711 | { |
| 712 | bflush (); |
| 713 | } |
| 714 | |
| 715 | bits_in(bits_in&&) = default; |
| 716 | bits_in(const bits_in&) = delete; |
| 717 | bits_in& operator=(const bits_in&) = delete; |
| 718 | |
| 719 | /* Completed a block of bools. */ |
| 720 | void bflush () |
| 721 | { |
| 722 | if (bit_pos) |
| 723 | bit_flush (bits&: in, bit_val, bit_pos); |
| 724 | } |
| 725 | |
| 726 | /* Read one bit. */ |
| 727 | bool b () |
| 728 | { |
| 729 | if (!bit_pos) |
| 730 | bit_val = in.u32 (); |
| 731 | bool x = (bit_val >> bit_pos) & 1; |
| 732 | bit_pos = (bit_pos + 1) % 32; |
| 733 | return x; |
| 734 | } |
| 735 | }; |
| 736 | |
| 737 | /* Factory function for bits_in. */ |
| 738 | |
| 739 | bytes_in::bits_in |
| 740 | bytes_in::stream_bits () |
| 741 | { |
| 742 | return bits_in (*this); |
| 743 | } |
| 744 | |
/* Bit stream writer (RAII-enabled), counterpart to bits_in.  */

struct bytes_out::bits_out {
  bytes_out& out;	/* Underlying byte stream.  */
  uint32_t bit_val = 0;	/* Bits accumulated so far.  */
  unsigned bit_pos = 0;	/* Number of bits accumulated.  */
  /* Value of the last bit written, for span instrumentation; -1 when
     no run is open.  */
  char is_set = -1;

  bits_out (bytes_out& out)
    : out (out)
  { }

  /* Flush any partial packet on destruction.  */
  ~bits_out ()
  {
    bflush ();
  }

  /* Movable but not copyable -- exactly one flusher.  */
  bits_out(bits_out&&) = default;
  bits_out(const bits_out&) = delete;
  bits_out& operator=(const bits_out&) = delete;

  /* Completed a block of bools.  */
  void bflush ()
  {
    if (bit_pos)
      {
	out.u32 (bit_val);
	out.lengths[2] += bit_flush (out, bit_val, bit_pos);
      }
    out.spans[2]++;
    is_set = -1;
  }

  /* Write one bit.

     It may be worth optimizing for most bools being zero.  Some kind of
     run-length encoding?  */
  void b (bool x)
  {
    /* Count runs of equal bits (instrumentation only).  */
    if (is_set != x)
      {
	is_set = x;
	out.spans[x]++;
      }
    out.lengths[x]++;
    bit_val |= unsigned (x) << bit_pos++;
    if (bit_pos == 32)
      {
	/* Packet full: spill it to the byte stream.  */
	out.u32 (bit_val);
	out.lengths[2] += bit_flush (out, bit_val, bit_pos);
      }
  }
};
| 798 | |
| 799 | /* Factory function for bits_out. */ |
| 800 | |
| 801 | bytes_out::bits_out |
| 802 | bytes_out::stream_bits () |
| 803 | { |
| 804 | return bits_out (*this); |
| 805 | } |
| 806 | |
| 807 | /* Instrumentation. */ |
| 808 | unsigned bytes_out::spans[4]; |
| 809 | unsigned bytes_out::lengths[4]; |
| 810 | |
| 811 | /* If CRC_PTR non-null, set the CRC of the buffer. Mix the CRC into |
| 812 | that pointed to by CRC_PTR. */ |
| 813 | |
| 814 | void |
| 815 | bytes_out::set_crc (unsigned *crc_ptr) |
| 816 | { |
| 817 | if (crc_ptr) |
| 818 | { |
| 819 | gcc_checking_assert (pos >= 4); |
| 820 | |
| 821 | unsigned crc = calc_crc (l: pos); |
| 822 | unsigned accum = *crc_ptr; |
| 823 | /* Only mix the existing *CRC_PTR if it is non-zero. */ |
| 824 | accum = accum ? crc32_combine (accum, crc, pos - 4) : crc; |
| 825 | *crc_ptr = accum; |
| 826 | |
| 827 | /* Buffer will be sufficiently aligned. */ |
| 828 | *(unsigned *)buffer = crc; |
| 829 | } |
| 830 | } |
| 831 | |
| 832 | /* Exactly 4 bytes. Used internally for bool packing and a few other |
| 833 | places. We can't simply use uint32_t because (a) alignment and |
| 834 | (b) we need little-endian for the bool streaming rewinding to make |
| 835 | sense. */ |
| 836 | |
| 837 | void |
| 838 | bytes_out::u32 (unsigned val) |
| 839 | { |
| 840 | if (char *ptr = write (count: 4)) |
| 841 | { |
| 842 | ptr[0] = val; |
| 843 | ptr[1] = val >> 8; |
| 844 | ptr[2] = val >> 16; |
| 845 | ptr[3] = val >> 24; |
| 846 | } |
| 847 | } |
| 848 | |
| 849 | unsigned |
| 850 | bytes_in::u32 () |
| 851 | { |
| 852 | unsigned val = 0; |
| 853 | if (const char *ptr = read (count: 4)) |
| 854 | { |
| 855 | val |= (unsigned char)ptr[0]; |
| 856 | val |= (unsigned char)ptr[1] << 8; |
| 857 | val |= (unsigned char)ptr[2] << 16; |
| 858 | val |= (unsigned char)ptr[3] << 24; |
| 859 | } |
| 860 | |
| 861 | return val; |
| 862 | } |
| 863 | |
| 864 | /* Chars are unsigned and written as single bytes. */ |
| 865 | |
| 866 | void |
| 867 | bytes_out::c (unsigned char v) |
| 868 | { |
| 869 | if (char *ptr = write (count: 1)) |
| 870 | *ptr = v; |
| 871 | } |
| 872 | |
| 873 | int |
| 874 | bytes_in::c () |
| 875 | { |
| 876 | int v = 0; |
| 877 | if (const char *ptr = read (count: 1)) |
| 878 | v = (unsigned char)ptr[0]; |
| 879 | return v; |
| 880 | } |
| 881 | |
| 882 | /* Ints 7-bit as a byte. Otherwise a 3bit count of following bytes in |
| 883 | big-endian form. 4 bits are in the first byte. */ |
| 884 | |
void
bytes_out::i (int v)
{
  if (char *ptr = write (count: 1))
    {
      /* Values in [-0x40,0x3f] fit the single-byte form: top bit
	 clear, low 7 bits hold the (sign-extended on read) value.  */
      if (v <= 0x3f && v >= -0x40)
	*ptr = v & 0x7f;
      else
	{
	  /* Multi-byte form.  Count the whole trailing bytes needed
	     beyond the 4 value bits carried in the prefix byte;
	     PROBE ends holding those top (signed) 4 bits.  */
	  unsigned bytes = 0;
	  int probe;
	  if (v >= 0)
	    for (probe = v >> 8; probe > 0x7; probe >>= 8)
	      bytes++;
	  else
	    for (probe = v >> 8; probe < -0x8; probe >>= 8)
	      bytes++;
	  /* Prefix: 0x80 marker | 3-bit extra-byte count | top 4
	     value bits.  */
	  *ptr = 0x80 | bytes << 4 | (probe & 0xf);
	  /* Then BYTES+1 payload bytes, most significant first.  */
	  if ((ptr = write (count: ++bytes)))
	    for (; bytes--; v >>= 8)
	      ptr[bytes] = v & 0xff;
	}
    }
}
| 909 | |
int
bytes_in::i ()
{
  int v = 0;
  if (const char *ptr = read (count: 1))
    {
      v = *ptr & 0xff;
      if (v & 0x80)
	{
	  /* Multi-byte form: 3-bit count of extra bytes, then 4
	     value bits which must be sign-extended.  */
	  unsigned bytes = (v >> 4) & 0x7;
	  v &= 0xf;
	  if (v & 0x8)
	    v |= -1 ^ 0x7;
	  /* unsigned necessary due to left shifts of -ve values.  */
	  unsigned uv = unsigned (v);
	  /* Accumulate the BYTES+1 payload bytes, big-endian.  */
	  if ((ptr = read (count: ++bytes)))
	    while (bytes--)
	      uv = (uv << 8) | (*ptr++ & 0xff);
	  v = int (uv);
	}
      else if (v & 0x40)
	/* Single-byte form: sign-extend bit 6 through the word.  */
	v |= -1 ^ 0x3f;
    }

  return v;
}
| 936 | |
| 937 | void |
| 938 | bytes_out::u (unsigned v) |
| 939 | { |
| 940 | if (char *ptr = write (count: 1)) |
| 941 | { |
| 942 | if (v <= 0x7f) |
| 943 | *ptr = v; |
| 944 | else |
| 945 | { |
| 946 | unsigned bytes = 0; |
| 947 | unsigned probe; |
| 948 | for (probe = v >> 8; probe > 0xf; probe >>= 8) |
| 949 | bytes++; |
| 950 | *ptr = 0x80 | bytes << 4 | probe; |
| 951 | if ((ptr = write (count: ++bytes))) |
| 952 | for (; bytes--; v >>= 8) |
| 953 | ptr[bytes] = v & 0xff; |
| 954 | } |
| 955 | } |
| 956 | } |
| 957 | |
| 958 | unsigned |
| 959 | bytes_in::u () |
| 960 | { |
| 961 | unsigned v = 0; |
| 962 | |
| 963 | if (const char *ptr = read (count: 1)) |
| 964 | { |
| 965 | v = *ptr & 0xff; |
| 966 | if (v & 0x80) |
| 967 | { |
| 968 | unsigned bytes = (v >> 4) & 0x7; |
| 969 | v &= 0xf; |
| 970 | if ((ptr = read (count: ++bytes))) |
| 971 | while (bytes--) |
| 972 | v = (v << 8) | (*ptr++ & 0xff); |
| 973 | } |
| 974 | } |
| 975 | |
| 976 | return v; |
| 977 | } |
| 978 | |
/* Same variable-length scheme as bytes_out::i, but for
   HOST_WIDE_INT payloads.  */

void
bytes_out::wi (HOST_WIDE_INT v)
{
  if (char *ptr = write (count: 1))
    {
      /* Values in [-0x40,0x3f] fit a single byte, top bit clear.  */
      if (v <= 0x3f && v >= -0x40)
	*ptr = v & 0x7f;
      else
	{
	  /* Count whole trailing bytes needed beyond the 4 value
	     bits held in the prefix; PROBE ends holding those top
	     (signed) 4 bits.  */
	  unsigned bytes = 0;
	  HOST_WIDE_INT probe;
	  if (v >= 0)
	    for (probe = v >> 8; probe > 0x7; probe >>= 8)
	      bytes++;
	  else
	    for (probe = v >> 8; probe < -0x8; probe >>= 8)
	      bytes++;
	  /* Prefix: marker bit, 3-bit extra-byte count, 4 value bits.  */
	  *ptr = 0x80 | bytes << 4 | (probe & 0xf);
	  /* Then BYTES+1 payload bytes, most significant first.  */
	  if ((ptr = write (count: ++bytes)))
	    for (; bytes--; v >>= 8)
	      ptr[bytes] = v & 0xff;
	}
    }
}
| 1003 | |
/* Read a variable-length HOST_WIDE_INT written by bytes_out::wi.  */

HOST_WIDE_INT
bytes_in::wi ()
{
  HOST_WIDE_INT v = 0;
  if (const char *ptr = read (count: 1))
    {
      v = *ptr & 0xff;
      if (v & 0x80)
	{
	  /* Multi-byte form: extra-byte count plus 4 value bits
	     that must be sign-extended.  */
	  unsigned bytes = (v >> 4) & 0x7;
	  v &= 0xf;
	  if (v & 0x8)
	    v |= -1 ^ 0x7;
	  /* unsigned necessary due to left shifts of -ve values.  */
	  unsigned HOST_WIDE_INT uv = (unsigned HOST_WIDE_INT) v;
	  /* Accumulate the BYTES+1 payload bytes, big-endian.  */
	  if ((ptr = read (count: ++bytes)))
	    while (bytes--)
	      uv = (uv << 8) | (*ptr++ & 0xff);
	  v = (HOST_WIDE_INT) uv;
	}
      else if (v & 0x40)
	/* Single-byte form: sign-extend bit 6.  */
	v |= -1 ^ 0x3f;
    }

  return v;
}
| 1030 | |
| 1031 | /* unsigned wide ints are just written as signed wide ints. */ |
| 1032 | |
| 1033 | inline void |
| 1034 | bytes_out::wu (unsigned HOST_WIDE_INT v) |
| 1035 | { |
| 1036 | wi (v: (HOST_WIDE_INT) v); |
| 1037 | } |
| 1038 | |
| 1039 | inline unsigned HOST_WIDE_INT |
| 1040 | bytes_in::wu () |
| 1041 | { |
| 1042 | return (unsigned HOST_WIDE_INT) wi (); |
| 1043 | } |
| 1044 | |
| 1045 | /* size_t written as unsigned or unsigned wide int. */ |
| 1046 | |
/* size_t written as unsigned or unsigned wide int, depending on the
   host's size_t width.  */

inline void
bytes_out::z (size_t s)
{
  if (sizeof (s) != sizeof (unsigned))
    wu (s);
  else
    u (s);
}
| 1055 | |
/* Read a size_t, matching bytes_out::z's width choice.  */

inline size_t
bytes_in::z ()
{
  if (sizeof (size_t) != sizeof (unsigned))
    return wu ();
  return u ();
}
| 1064 | |
| 1065 | /* location_t written as 32- or 64-bit as needed. */ |
| 1066 | |
| 1067 | inline void bytes_out::loc (location_t l) |
| 1068 | { |
| 1069 | if (sizeof (location_t) > sizeof (unsigned)) |
| 1070 | wu (v: l); |
| 1071 | else |
| 1072 | u (v: l); |
| 1073 | } |
| 1074 | |
| 1075 | inline location_t bytes_in::loc () |
| 1076 | { |
| 1077 | if (sizeof (location_t) > sizeof (unsigned)) |
| 1078 | return wu (); |
| 1079 | else |
| 1080 | return u (); |
| 1081 | } |
| 1082 | |
| 1083 | /* Buffer simply memcpied. */ |
/* Reserve LEN bytes for a raw blob, aligned so readers can access it
   in place.  Returns the writable area, or NULL on failure.  */
void *
bytes_out::buf (size_t len)
{
  align (2 * sizeof (void *));
  return write (len);
}
| 1090 | |
/* Copy LEN bytes from SRC into the stream as an aligned blob.  */
void
bytes_out::buf (const void *src, size_t len)
{
  void *ptr = buf (len);
  if (ptr)
    memcpy (ptr, src, len);
}
| 1097 | |
/* Read an aligned LEN-byte blob; NULL on failure.  */
const void *
bytes_in::buf (size_t len)
{
  align (2 * sizeof (void *));

  return read (len);
}
| 1106 | |
/* Strings as a size_t length, followed by the buffer.  Make sure
   there's a NUL terminator on read.  */
| 1109 | |
void
bytes_out::str (const char *string, size_t len)
{
  /* Length first; empty strings write no payload at all.  */
  z (len);
  if (!len)
    return;

  /* The buffer includes the terminating NUL, which must be there.  */
  gcc_checking_assert (!string[len]);
  buf (string, len + 1);
}
| 1120 | |
/* Read a string: length, then payload with NUL terminator.  Never
   returns NULL; a missing or malformed string poisons the stream and
   yields "".  */

const char *
bytes_in::str (size_t *len_p)
{
  size_t len = z ();

  /* We're about to trust some user data.  */
  if (overrun)
    len = 0;
  if (len_p)
    *len_p = len;

  if (len)
    {
      const char *string
	= reinterpret_cast<const char *> (buf (len + 1));
      if (string && !string[len])
	return string;

      /* Truncated payload or missing terminator.  */
      set_overrun ();
    }

  return "";
}
| 1143 | |
/* Read an identifier as a cpp hash node; the empty string denotes
   no identifier (NULL).  */

cpp_hashnode *
bytes_in::cpp_node ()
{
  size_t len;
  const char *id = str (&len);

  return len ? ::cpp_node (get_identifier_with_length (id, len)) : NULL;
}
| 1153 | |
| 1154 | /* Format a string directly to the buffer, including a terminating |
| 1155 | NUL. Intended for human consumption. */ |
| 1156 | |
void
bytes_out::printf (const char *format, ...)
{
  va_list args;
  /* Exercise buffer expansion.  */
  size_t len = EXPERIMENT (10, 500);

  /* Try formatting into LEN bytes; if vsnprintf reports truncation,
     give the space back and retry with the reported length.  */
  while (char *ptr = write (count: len))
    {
      va_start (args, format);
      /* +1 to count the terminating NUL in the space consumed.  */
      size_t actual = vsnprintf (s: ptr, maxlen: len, format: format, arg: args) + 1;
      va_end (args);
      if (actual <= len)
	{
	  /* It fit: return the unused tail.  */
	  unuse (count: len - actual);
	  break;
	}
      /* Too small: release the whole attempt and go around again.  */
      unuse (count: len);
      len = actual;
    }
}
| 1178 | |
/* Append a human-readable timestamp: KIND prefix, broken-down TIME
   and timezone string TZ.  For human consumption only.  */

void
bytes_out::print_time (const char *kind, const tm *time, const char *tz)
{
  printf (format: "%stime: %4u/%02u/%02u %02u:%02u:%02u %s" ,
	  kind, time->tm_year + 1900, time->tm_mon + 1, time->tm_mday,
	  time->tm_hour, time->tm_min, time->tm_sec, tz);
}
| 1186 | |
| 1187 | /* Encapsulated Lazy Records Of Named Declarations. |
| 1188 | Header: Stunningly Elf32_Ehdr-like |
| 1189 | Sections: Sectional data |
| 1190 | [1-N) : User data sections |
| 1191 | N .strtab : strings, stunningly ELF STRTAB-like |
| 1192 | Index: Section table, stunningly ELF32_Shdr-like. */ |
| 1193 | |
class elf {
protected:
  /* Constants used within the format.  */
  enum private_constants {
    /* File kind.  */
    ET_NONE = 0,
    EM_NONE = 0,
    OSABI_NONE = 0,

    /* File format.  */
    EV_CURRENT = 1,
    CLASS32 = 1,
    DATA2LSB = 1,
    DATA2MSB = 2,

    /* Section numbering.  */
    SHN_UNDEF = 0,
    SHN_LORESERVE = 0xff00,
    SHN_XINDEX = 0xffff,

    /* Section types.  */
    SHT_NONE = 0,	/* No contents.  */
    SHT_PROGBITS = 1,	/* Random bytes.  */
    SHT_STRTAB = 3,	/* A string table.  */

    /* Section flags.  */
    SHF_NONE = 0x00,	/* Nothing.  */
    SHF_STRINGS = 0x20,	/* NUL-Terminated strings.  */

    /* I really hope we do not get CMI files larger than 4GB.  */
    MY_CLASS = CLASS32,
    /* It is host endianness that is relevant.  */
    MY_ENDIAN = DATA2LSB
#ifdef WORDS_BIGENDIAN
    ^ DATA2LSB ^ DATA2MSB
#endif
  };

public:
  /* Constants visible to users.  */
  enum public_constants {
    /* Special error codes.  Breaking layering a bit.  */
    E_BAD_DATA = -1,	/* Random unexpected data errors.  */
    E_BAD_LAZY = -2,	/* Badly ordered laziness.  */
    E_BAD_IMPORT = -3	/* A nested import failed.  */
  };

protected:
  /* File identification.  On-disk representation.  */
  struct ident {
    uint8_t magic[4];	/* 0x7f, 'E', 'L', 'F' */
    uint8_t klass;	/* 4:CLASS32 */
    uint8_t data;	/* 5:DATA2[LM]SB */
    uint8_t version;	/* 6:EV_CURRENT  */
    uint8_t osabi;	/* 7:OSABI_NONE */
    uint8_t abiver;	/* 8: 0 */
    uint8_t pad[7];	/* 9-15 */
  };
  /* File header.  On-disk representation.  Field names follow the
     Elf32_Ehdr layout; the reader (elf_in::begin) relies on ident,
     type, machine, shoff, shentsize, shnum and shstrndx.  */
  struct header {
    struct ident ident;
    uint16_t type;	/* ET_NONE */
    uint16_t machine;	/* EM_NONE */
    uint32_t version;	/* EV_CURRENT */
    uint32_t entry;	/* 0 */
    uint32_t phoff;	/* 0 */
    uint32_t shoff;	/* Section Header Offset in file */
    uint32_t flags;
    uint16_t ehsize;	/* ELROND Header SIZE -- sizeof (header) */
    uint16_t phentsize; /* 0 */
    uint16_t phnum;	/* 0 */
    uint16_t shentsize;	/* Section Header SIZE -- sizeof (section) */
    uint16_t shnum;	/* Section Header NUM */
    uint16_t shstrndx;	/* Section Header STRing iNDeX */
  };
  /* File section.  On-disk representation.  */
  struct section {
    uint32_t name;	/* String table offset.  */
    uint32_t type;	/* SHT_* */
    uint32_t flags;	/* SHF_* */
    uint32_t addr;	/* 0 */
    uint32_t offset;	/* OFFSET in file */
    uint32_t size;	/* SIZE of section */
    uint32_t link;	/* 0 */
    uint32_t info;	/* 0 */
    uint32_t addralign;	/* 0 */
    uint32_t entsize;	/* ENTry SIZE, usually 0 */
  };

protected:
  data hdr;	/* The header.  */
  data sectab; 	/* The section table.  */
  data strtab;	/* String table.  */
  int fd;	/* File descriptor we're reading or writing.  */
  int err; 	/* Sticky error code.  */

public:
  /* Construct from STREAM.  E is errno if STREAM NULL.  */
  elf (int fd, int e)
    :hdr (), sectab (), strtab (), fd (fd), err (fd >= 0 ? 0 : e)
  {}
  ~elf ()
  {
    /* The file must have been end()ed and the buffers released
       before destruction.  */
    gcc_checking_assert (fd < 0 && !hdr.buffer
			 && !sectab.buffer && !strtab.buffer);
  }

public:
  /* Return the error, if we have an error.  */
  int get_error () const
  {
    return err;
  }
  /* Set the error, unless it's already been set.  */
  void set_error (int e = E_BAD_DATA)
  {
    if (!err)
      err = e;
  }
  /* Get an error string.  */
  const char *get_error (const char *) const;

public:
  /* Begin reading/writing file.  Return false on error.  */
  bool begin () const
  {
    return !get_error ();
  }
  /* Finish reading/writing file.  Return false on error.  */
  bool end ();
};
| 1325 | |
| 1326 | /* Return error string. */ |
| 1327 | |
| 1328 | const char * |
| 1329 | elf::get_error (const char *name) const |
| 1330 | { |
| 1331 | if (!name) |
| 1332 | return "Unknown CMI mapping" ; |
| 1333 | |
| 1334 | switch (err) |
| 1335 | { |
| 1336 | case 0: |
| 1337 | gcc_unreachable (); |
| 1338 | case E_BAD_DATA: |
| 1339 | return "Bad file data" ; |
| 1340 | case E_BAD_IMPORT: |
| 1341 | return "Bad import dependency" ; |
| 1342 | case E_BAD_LAZY: |
| 1343 | return "Bad lazy ordering" ; |
| 1344 | default: |
| 1345 | return xstrerror (err); |
| 1346 | } |
| 1347 | } |
| 1348 | |
| 1349 | /* Finish file, return true if there's an error. */ |
| 1350 | |
/* Finish the file: close the descriptor (if still open), keeping
   any sticky error.  Return true on success.  */

bool
elf::end ()
{
  int f = fd;
  fd = -1;
  if (f >= 0 && close (f))
    set_error (errno);

  return !get_error ();
}
| 1361 | |
| 1362 | /* ELROND reader. */ |
| 1363 | |
class elf_in : public elf {
  typedef elf parent;

private:
  /* For freezing & defrosting: identity of the underlying file, so
     defrost can check it has not been replaced behind our back.  */
#if !defined (HOST_LACKS_INODE_NUMBERS)
  dev_t device;
  ino_t inode;
#endif

public:
  elf_in (int fd, int e)
    :parent (fd, e)
  {
  }
  ~elf_in ()
  {
  }

public:
  /* Frozen: file closed but remembered (hdr.pos holds its size).  */
  bool is_frozen () const
  {
    return fd < 0 && hdr.pos;
  }
  bool is_freezable () const
  {
    return fd >= 0 && hdr.pos;
  }
  void freeze ();
  bool defrost (const char *);

  /* If BYTES is in the mmapped area, allocate a new buffer for it.  */
  void preserve (bytes_in &bytes ATTRIBUTE_UNUSED)
  {
#if MAPPED_READING
    if (hdr.buffer && bytes.buffer >= hdr.buffer
	&& bytes.buffer < hdr.buffer + hdr.pos)
      {
	char *buf = bytes.buffer;
	bytes.buffer = data::simple_memory.grow (NULL, bytes.size);
	memcpy (bytes.buffer, buf, bytes.size);
      }
#endif
  }
  /* If BYTES is not in SELF's mmapped area, free it.  SELF might be
     NULL.  */
  static void release (elf_in *self ATTRIBUTE_UNUSED, bytes_in &bytes)
  {
#if MAPPED_READING
    if (!(self && self->hdr.buffer && bytes.buffer >= self->hdr.buffer
	  && bytes.buffer < self->hdr.buffer + self->hdr.pos))
#endif
      data::simple_memory.shrink (bytes.buffer);
    bytes.buffer = NULL;
    bytes.size = 0;
  }

public:
  /* When mapped, sections alias the mapping; only allocate when
     actually reading from the file.  */
  static void grow (data &data, unsigned needed)
  {
    gcc_checking_assert (!data.buffer);
#if !MAPPED_READING
    data.buffer = XNEWVEC (char, needed);
#endif
    data.size = needed;
  }
  static void shrink (data &data)
  {
#if !MAPPED_READING
    XDELETEVEC (data.buffer);
#endif
    data.buffer = NULL;
    data.size = 0;
  }

public:
  /* Return section header S, or NULL if S is out of range.  */
  const section *get_section (unsigned s) const
  {
    if (s * sizeof (section) < sectab.size)
      return reinterpret_cast<const section *>
	(&sectab.buffer[s * sizeof (section)]);
    else
      return NULL;
  }
  unsigned get_section_limit () const
  {
    return sectab.size / sizeof (section);
  }

protected:
  const char *read (data *, unsigned, unsigned);

public:
  /* Read section by number.  */
  bool read (data *d, const section *s)
  {
    return s && read (d, s->offset, s->size);
  }

  /* Find section by name.  */
  unsigned find (const char *name);
  /* Find section by index.  */
  const section *find (unsigned snum, unsigned type = SHT_PROGBITS);

public:
  /* Release the string table, when we're done with it.  */
  void release ()
  {
    shrink (strtab);
  }

public:
  bool begin (location_t);
  bool end ()
  {
    release ();
#if MAPPED_READING
    if (hdr.buffer)
      munmap (hdr.buffer, hdr.pos);
    hdr.buffer = NULL;
#endif
    shrink (sectab);

    return parent::end ();
  }

public:
  /* Return string name at OFFSET.  Checks OFFSET range.  Always
     returns non-NULL.  We know offset 0 is an empty string.  */
  const char *name (unsigned offset)
  {
    return &strtab.buffer[offset < strtab.size ? offset : 0];
  }
};
| 1498 | |
| 1499 | /* ELROND writer. */ |
| 1500 | |
class elf_out : public elf, public data::allocator {
  typedef elf parent;
  /* Desired section alignment on disk.  */
  static const int SECTION_ALIGN = 16;

private:
  ptr_int_hash_map identtab;	/* Map of IDENTIFIERS to strtab offsets. */
  unsigned pos;			/* Write position in file. */
#if MAPPED_WRITING
  unsigned offset;		/* Offset of the mapping. */
  unsigned extent;		/* Length of mapping. */
  unsigned page_size;		/* System page size. */
#endif

public:
  elf_out (int fd, int e)
    :parent (fd, e), identtab (500), pos (0)
  {
#if MAPPED_WRITING
    offset = extent = 0;
    page_size = sysconf (_SC_PAGE_SIZE);
    if (page_size < SECTION_ALIGN)
      /* Something really strange.  */
      set_error (EINVAL);
#endif
  }
  ~elf_out ()
  {
    /* Release the scratch buffers; end() must already have dealt
       with the file itself (asserted in ~elf).  */
    data::simple_memory.shrink (obj&: hdr);
    data::simple_memory.shrink (obj&: sectab);
    data::simple_memory.shrink (obj&: strtab);
  }

#if MAPPED_WRITING
private:
  /* Managing the sliding mmap window over the output file.  */
  void create_mapping (unsigned ext, bool extending = true);
  void remove_mapping ();
#endif

protected:
  using allocator::grow;
  char *grow (char *, unsigned needed) final override;
#if MAPPED_WRITING
  using allocator::shrink;
  void shrink (char *) final override;
#endif

public:
  unsigned get_section_limit () const
  {
    return sectab.pos / sizeof (section);
  }

protected:
  /* Append a section-table entry; returns its section number.  */
  unsigned add (unsigned type, unsigned name = 0,
		unsigned off = 0, unsigned size = 0, unsigned flags = SHF_NONE);
  unsigned write (const data &);
#if MAPPED_WRITING
  unsigned write (const bytes_out &);
#endif

public:
  /* IDENTIFIER to strtab offset.  */
  unsigned name (tree ident);
  /* String literal to strtab offset.  */
  unsigned name (const char *n);
  /* Qualified name of DECL to strtab offset.  */
  unsigned qualified_name (tree decl, bool is_defn);

private:
  unsigned strtab_write (const char *s, unsigned l);
  void strtab_write (tree decl, int);

public:
  /* Add a section with contents or strings.  */
  unsigned add (const bytes_out &, bool string_p, unsigned name);

public:
  /* Begin and end writing.  */
  bool begin ();
  bool end ();
};
| 1583 | |
| 1584 | /* Begin reading section NAME (of type PROGBITS) from SOURCE. |
| 1585 | Data always checked for CRC. */ |
| 1586 | |
| 1587 | bool |
| 1588 | bytes_in::begin (location_t loc, elf_in *source, const char *name) |
| 1589 | { |
| 1590 | unsigned snum = source->find (name); |
| 1591 | |
| 1592 | return begin (loc, src: source, snum, name); |
| 1593 | } |
| 1594 | |
| 1595 | /* Begin reading section numbered SNUM with NAME (may be NULL). */ |
| 1596 | |
| 1597 | bool |
| 1598 | bytes_in::begin (location_t loc, elf_in *source, unsigned snum, const char *name) |
| 1599 | { |
| 1600 | if (!source->read (d: this, s: source->find (snum)) |
| 1601 | || !size || !check_crc ()) |
| 1602 | { |
| 1603 | source->set_error (elf::E_BAD_DATA); |
| 1604 | source->shrink (data&: *this); |
| 1605 | if (name) |
| 1606 | error_at (loc, "section %qs is missing or corrupted" , name); |
| 1607 | else |
| 1608 | error_at (loc, "section #%u is missing or corrupted" , snum); |
| 1609 | return false; |
| 1610 | } |
| 1611 | pos = 4; |
| 1612 | return true; |
| 1613 | } |
| 1614 | |
| 1615 | /* Finish reading a section. */ |
| 1616 | |
| 1617 | bool |
| 1618 | bytes_in::end (elf_in *src) |
| 1619 | { |
| 1620 | if (more_p ()) |
| 1621 | set_overrun (); |
| 1622 | if (overrun) |
| 1623 | src->set_error (); |
| 1624 | |
| 1625 | src->shrink (data&: *this); |
| 1626 | |
| 1627 | return !overrun; |
| 1628 | } |
| 1629 | |
| 1630 | /* Begin writing buffer. */ |
| 1631 | |
| 1632 | void |
| 1633 | bytes_out::begin (bool need_crc) |
| 1634 | { |
| 1635 | if (need_crc) |
| 1636 | pos = 4; |
| 1637 | memory->grow (obj&: *this, needed: 0, exact: false); |
| 1638 | } |
| 1639 | |
| 1640 | /* Finish writing buffer. Stream out to SINK as named section NAME. |
| 1641 | Return section number or 0 on failure. If CRC_PTR is true, crc |
| 1642 | the data. Otherwise it is a string section. */ |
| 1643 | |
| 1644 | unsigned |
| 1645 | bytes_out::end (elf_out *sink, unsigned name, unsigned *crc_ptr) |
| 1646 | { |
| 1647 | lengths[3] += pos; |
| 1648 | spans[3]++; |
| 1649 | |
| 1650 | set_crc (crc_ptr); |
| 1651 | unsigned sec_num = sink->add (*this, string_p: !crc_ptr, name); |
| 1652 | memory->shrink (obj&: *this); |
| 1653 | |
| 1654 | return sec_num; |
| 1655 | } |
| 1656 | |
| 1657 | /* Close and open the file, without destroying it. */ |
| 1658 | |
/* Close the file without forgetting it: drop the mapping (if any)
   and the descriptor, remembering the size in hdr.pos so defrost can
   verify the file on reopening.  */

void
elf_in::freeze ()
{
  gcc_checking_assert (!is_frozen ());
#if MAPPED_READING
  if (munmap (hdr.buffer, hdr.pos) < 0)
    set_error (errno);
#endif
  if (close (fd) < 0)
    set_error (errno);
  fd = -1;
}
| 1671 | |
/* Reopen NAME after a freeze.  Verify it is still the same file
   (size and, where available, device/inode) and re-establish the
   mapping.  Return true on success.  */

bool
elf_in::defrost (const char *name)
{
  gcc_checking_assert (is_frozen ());
  struct stat stat;

  fd = open (file: name, O_RDONLY | O_CLOEXEC | O_BINARY);
  if (fd < 0 || fstat (fd: fd, buf: &stat) < 0)
    set_error (errno);
  else
    {
      /* hdr.pos remembers the file size from before freezing.  */
      bool ok = hdr.pos == unsigned (stat.st_size);
#ifndef HOST_LACKS_INODE_NUMBERS
      if (device != stat.st_dev
	  || inode != stat.st_ino)
	ok = false;
#endif
      if (!ok)
	set_error (EMFILE);
#if MAPPED_READING
      if (ok)
	{
	  char *mapping = reinterpret_cast<char *>
	    (mmap (NULL, len: hdr.pos, PROT_READ, MAP_SHARED, fd: fd, offset: 0));
	  if (mapping == MAP_FAILED)
	  fail:
	      set_error (errno);
	  else
	    {
	      /* We'll access the mapping randomly.  */
	      if (madvise (addr: mapping, len: hdr.pos, MADV_RANDOM))
		goto fail;

	      /* These buffers are never NULL in this case; their pos
		 fields hold the tables' offsets into the mapping.  */
	      strtab.buffer = mapping + strtab.pos;
	      sectab.buffer = mapping + sectab.pos;
	      hdr.buffer = mapping;
	    }
	}
#endif
    }

  return !get_error ();
}
| 1715 | |
| 1716 | /* Read at current position into BUFFER. Return true on success. */ |
| 1717 | |
/* Make LENGTH bytes at file position POS available in DATA's buffer.
   When mapped, DATA aliases the mapping; otherwise the bytes are
   seek+read into freshly allocated storage.  Return the buffer, or
   NULL on error (with the sticky error set).  */

const char *
elf_in::read (data *data, unsigned pos, unsigned length)
{
#if MAPPED_READING
  /* Range-check against the mapped extent.  */
  if (pos + length > hdr.pos)
    {
      set_error (EINVAL);
      return NULL;
    }
#else
  /* POS of ~0u means read from the current file position.  */
  if (pos != ~0u && lseek (fd, pos, SEEK_SET) < 0)
    {
      set_error (errno);
      return NULL;
    }
#endif
  grow (data&: *data, needed: length);
#if MAPPED_READING
  /* Point straight into the mapping, no copy.  */
  data->buffer = hdr.buffer + pos;
#else
  if (::read (fd, data->buffer, data->size) != ssize_t (length))
    {
      set_error (errno);
      shrink (*data);
      return NULL;
    }
#endif

  return data->buffer;
}
| 1748 | |
| 1749 | /* Read section SNUM of TYPE. Return section pointer or NULL on error. */ |
| 1750 | |
| 1751 | const elf::section * |
| 1752 | elf_in::find (unsigned snum, unsigned type) |
| 1753 | { |
| 1754 | const section *sec = get_section (s: snum); |
| 1755 | if (!snum || !sec || sec->type != type) |
| 1756 | return NULL; |
| 1757 | return sec; |
| 1758 | } |
| 1759 | |
| 1760 | /* Find a section NAME and TYPE. Return section number, or zero on |
| 1761 | failure. */ |
| 1762 | |
/* Find a section named SNAME.  Return its section number, or zero on
   failure.  Searches backwards; the loop condition stops before
   entry zero, which is not a real section.  */

unsigned
elf_in::find (const char *sname)
{
  for (unsigned pos = sectab.size; pos -= sizeof (section); )
    {
      const section *sec
	= reinterpret_cast<const section *> (&sectab.buffer[pos]);

      if (0 == strcmp (sname, name (sec->name)))
	return pos / sizeof (section);
    }

  return 0;
}
| 1777 | |
| 1778 | /* Begin reading file. Verify header. Pull in section and string |
| 1779 | tables. Return true on success. */ |
| 1780 | |
/* Begin reading file.  Verify header.  Pull in section and string
   tables.  Return true on success.  */

bool
elf_in::begin (location_t loc)
{
  if (!parent::begin ())
    return false;

  struct stat stat;
  unsigned size = 0;
  if (!fstat (fd: fd, buf: &stat))
    {
#if !defined (HOST_LACKS_INODE_NUMBERS)
      /* Remember the file's identity for freeze/defrost checks.  */
      device = stat.st_dev;
      inode = stat.st_ino;
#endif
      /* Never generate files > 4GB, check we've not been given one.  */
      if (stat.st_size == unsigned (stat.st_size))
	size = unsigned (stat.st_size);
    }

#if MAPPED_READING
  /* MAP_SHARED so that the file is backing store.  If someone else
     concurrently writes it, they're wrong.  */
  void *mapping = mmap (NULL, len: size, PROT_READ, MAP_SHARED, fd: fd, offset: 0);
  if (mapping == MAP_FAILED)
    {
    fail:
      set_error (errno);
      return false;
    }
  /* We'll be hopping over this randomly.  Some systems declare the
     first parm as char *, and other declare it as void *.  */
  if (madvise (addr: reinterpret_cast <char *> (mapping), len: size, MADV_RANDOM))
    goto fail;

  hdr.buffer = (char *)mapping;
#else
  read (&hdr, 0, sizeof (header));
#endif
  hdr.pos = size; /* Record size of the file.  */

  const header *h = reinterpret_cast<const header *> (hdr.buffer);
  if (!h)
    return false;

  /* Check the ELF magic number.  */
  if (h->ident.magic[0] != 0x7f
      || h->ident.magic[1] != 'E'
      || h->ident.magic[2] != 'L'
      || h->ident.magic[3] != 'F')
    {
      error_at (loc, "not Encapsulated Lazy Records of Named Declarations" );
    failed:
      shrink (data&: hdr);
      return false;
    }

  /* We expect a particular format -- the ELF is not intended to be
     distributable.  */
  if (h->ident.klass != MY_CLASS
      || h->ident.data != MY_ENDIAN
      || h->ident.version != EV_CURRENT
      || h->type != ET_NONE
      || h->machine != EM_NONE
      || h->ident.osabi != OSABI_NONE)
    {
      error_at (loc, "unexpected encapsulation format or type" );
      goto failed;
    }

  int e = -1;
  if (!h->shoff || h->shentsize != sizeof (section))
    {
    malformed:
      set_error (e);
      error_at (loc, "encapsulation is malformed" );
      goto failed;
    }

  unsigned strndx = h->shstrndx;
  unsigned shnum = h->shnum;
  if (shnum == SHN_XINDEX)
    {
      /* Section count overflowed 16 bits: the real count lives in
	 section zero's size field.  */
      if (!read (data: &sectab, pos: h->shoff, length: sizeof (section)))
	{
	section_table_fail:
	  e = errno;
	  goto malformed;
	}
      shnum = get_section (s: 0)->size;
      /* Freeing does mean we'll re-read it in the case we're not
	 mapping, but this is going to be rare.  */
      shrink (data&: sectab);
    }

  if (!shnum)
    goto malformed;

  /* Read the whole section table.  */
  if (!read (data: &sectab, pos: h->shoff, length: shnum * sizeof (section)))
    goto section_table_fail;

  /* Likewise the string-table index may overflow into section
     zero's link field.  */
  if (strndx == SHN_XINDEX)
    strndx = get_section (s: 0)->link;

  if (!read (d: &strtab, s: find (snum: strndx, type: SHT_STRTAB)))
    goto malformed;

  /* The string table should be at least one byte, with NUL chars
     at either end.  */
  if (!(strtab.size && !strtab.buffer[0]
	&& !strtab.buffer[strtab.size - 1]))
    goto malformed;

#if MAPPED_READING
  /* Record the offsets of the section and string tables.  */
  sectab.pos = h->shoff;
  strtab.pos = shnum * sizeof (section);
#else
  shrink (hdr);
#endif

  return true;
}
| 1902 | |
| 1903 | /* Create a new mapping. */ |
| 1904 | |
#if MAPPED_WRITING
/* Create a new mapping of EXT bytes at the current file OFFSET.  When
   EXTENDING, the file is grown to cover the mapping first (trying a
   doubled size for small extents).  On failure the sticky error is
   set and hdr.buffer/extent end up NULL/0.  */
void
elf_out::create_mapping (unsigned ext, bool extending)
{
  /* A wrapper around posix_fallocate, falling back to ftruncate
     if the underlying filesystem does not support the operation.  */
  auto allocate = [](int fd, off_t offset, off_t length)
    {
#ifdef HAVE_POSIX_FALLOCATE
      int result = posix_fallocate (fd: fd, offset: offset, len: length);
      if (result != EINVAL)
	return result == 0;
      /* Not supported by the underlying filesystem, fallback to ftruncate.  */
#endif
      return ftruncate (fd: fd, length: offset + length) == 0;
    };

  void *mapping = MAP_FAILED;
  if (extending && ext < 1024 * 1024)
    {
      /* Speculatively try a doubled extent to reduce remappings.  */
      if (allocate (fd, offset, ext * 2))
	mapping = mmap (NULL, len: ext * 2, PROT_READ | PROT_WRITE,
			MAP_SHARED, fd: fd, offset: offset);
      if (mapping != MAP_FAILED)
	ext *= 2;
    }
  if (mapping == MAP_FAILED)
    {
      /* Fall back to the requested extent.  */
      if (!extending || allocate (fd, offset, ext))
	mapping = mmap (NULL, len: ext, PROT_READ | PROT_WRITE,
			MAP_SHARED, fd: fd, offset: offset);
      if (mapping == MAP_FAILED)
	{
	  set_error (errno);
	  mapping = NULL;
	  ext = 0;
	}
    }
  hdr.buffer = (char *)mapping;
  extent = ext;
}
#endif
| 1947 | |
| 1948 | /* Flush out the current mapping. */ |
| 1949 | |
| 1950 | #if MAPPED_WRITING |
| 1951 | void |
| 1952 | elf_out::remove_mapping () |
| 1953 | { |
| 1954 | if (hdr.buffer) |
| 1955 | { |
| 1956 | /* MS_ASYNC dtrt with the removed mapping, including a |
| 1957 | subsequent overlapping remap. */ |
| 1958 | if (msync (addr: hdr.buffer, len: extent, MS_ASYNC) |
| 1959 | || munmap (addr: hdr.buffer, len: extent)) |
| 1960 | /* We're somewhat screwed at this point. */ |
| 1961 | set_error (errno); |
| 1962 | } |
| 1963 | |
| 1964 | hdr.buffer = NULL; |
| 1965 | } |
| 1966 | #endif |
| 1967 | |
| 1968 | /* Grow a mapping of PTR to be NEEDED bytes long. This gets |
| 1969 | interesting if the new size grows the EXTENT. */ |
| 1970 | |
| 1971 | char * |
| 1972 | elf_out::grow (char *data, unsigned needed) |
| 1973 | { |
| 1974 | if (!data) |
| 1975 | { |
| 1976 | /* First allocation, check we're aligned. */ |
| 1977 | gcc_checking_assert (!(pos & (SECTION_ALIGN - 1))); |
| 1978 | #if MAPPED_WRITING |
| 1979 | data = hdr.buffer + (pos - offset); |
| 1980 | #endif |
| 1981 | } |
| 1982 | |
| 1983 | #if MAPPED_WRITING |
| 1984 | unsigned off = data - hdr.buffer; |
| 1985 | if (off + needed > extent) |
| 1986 | { |
| 1987 | /* We need to grow the mapping. */ |
| 1988 | unsigned lwm = off & ~(page_size - 1); |
| 1989 | unsigned hwm = (off + needed + page_size - 1) & ~(page_size - 1); |
| 1990 | |
| 1991 | gcc_checking_assert (hwm > extent); |
| 1992 | |
| 1993 | remove_mapping (); |
| 1994 | |
| 1995 | offset += lwm; |
| 1996 | create_mapping (ext: extent < hwm - lwm ? hwm - lwm : extent); |
| 1997 | |
| 1998 | data = hdr.buffer + (off - lwm); |
| 1999 | } |
| 2000 | #else |
| 2001 | data = allocator::grow (data, needed); |
| 2002 | #endif |
| 2003 | |
| 2004 | return data; |
| 2005 | } |
| 2006 | |
| 2007 | #if MAPPED_WRITING |
/* Shrinking is a NOP: under mapped writing, buffers handed out by
   grow point directly into the file mapping, so there is no separate
   storage to release.  */
void
elf_out::shrink (char *)
{
}
| 2013 | #endif |
| 2014 | |
| 2015 | /* Write S of length L to the strtab buffer. L must include the ending |
| 2016 | NUL, if that's what you want. */ |
| 2017 | |
| 2018 | unsigned |
| 2019 | elf_out::strtab_write (const char *s, unsigned l) |
| 2020 | { |
| 2021 | if (strtab.pos + l > strtab.size) |
| 2022 | data::simple_memory.grow (obj&: strtab, needed: strtab.pos + l, exact: false); |
| 2023 | memcpy (dest: strtab.buffer + strtab.pos, src: s, n: l); |
| 2024 | unsigned res = strtab.pos; |
| 2025 | strtab.pos += l; |
| 2026 | return res; |
| 2027 | } |
| 2028 | |
| 2029 | /* Write qualified name of decl. INNER >0 if this is a definition, <0 |
| 2030 | if this is a qualifier of an outer name. */ |
| 2031 | |
| 2032 | void |
| 2033 | elf_out::strtab_write (tree decl, int inner) |
| 2034 | { |
| 2035 | tree ctx = CP_DECL_CONTEXT (decl); |
| 2036 | if (TYPE_P (ctx)) |
| 2037 | ctx = TYPE_NAME (ctx); |
| 2038 | if (ctx != global_namespace) |
| 2039 | strtab_write (decl: ctx, inner: -1); |
| 2040 | |
| 2041 | tree name = DECL_NAME (decl); |
| 2042 | if (!name) |
| 2043 | name = DECL_ASSEMBLER_NAME_RAW (decl); |
| 2044 | strtab_write (IDENTIFIER_POINTER (name), IDENTIFIER_LENGTH (name)); |
| 2045 | |
| 2046 | if (inner) |
| 2047 | strtab_write (s: &"::{}" [inner+1], l: 2); |
| 2048 | } |
| 2049 | |
| 2050 | /* Map IDENTIFIER IDENT to strtab offset. Inserts into strtab if not |
| 2051 | already there. */ |
| 2052 | |
| 2053 | unsigned |
| 2054 | elf_out::name (tree ident) |
| 2055 | { |
| 2056 | unsigned res = 0; |
| 2057 | if (ident) |
| 2058 | { |
| 2059 | bool existed; |
| 2060 | int *slot = &identtab.get_or_insert (k: ident, existed: &existed); |
| 2061 | if (!existed) |
| 2062 | *slot = strtab_write (IDENTIFIER_POINTER (ident), |
| 2063 | IDENTIFIER_LENGTH (ident) + 1); |
| 2064 | res = *slot; |
| 2065 | } |
| 2066 | return res; |
| 2067 | } |
| 2068 | |
| 2069 | /* Map LITERAL to strtab offset. Does not detect duplicates and |
| 2070 | expects LITERAL to remain live until strtab is written out. */ |
| 2071 | |
| 2072 | unsigned |
| 2073 | elf_out::name (const char *literal) |
| 2074 | { |
| 2075 | return strtab_write (s: literal, l: strlen (s: literal) + 1); |
| 2076 | } |
| 2077 | |
| 2078 | /* Map a DECL's qualified name to strtab offset. Does not detect |
| 2079 | duplicates. */ |
| 2080 | |
| 2081 | unsigned |
| 2082 | elf_out::qualified_name (tree decl, bool is_defn) |
| 2083 | { |
| 2084 | gcc_checking_assert (DECL_P (decl) && decl != global_namespace); |
| 2085 | unsigned result = strtab.pos; |
| 2086 | |
| 2087 | strtab_write (decl, inner: is_defn); |
| 2088 | strtab_write (s: "" , l: 1); |
| 2089 | |
| 2090 | return result; |
| 2091 | } |
| 2092 | |
| 2093 | /* Add section to file. Return section number. TYPE & NAME identify |
| 2094 | the section. OFF and SIZE identify the file location of its |
| 2095 | data. FLAGS contains additional info. */ |
| 2096 | |
| 2097 | unsigned |
| 2098 | elf_out::add (unsigned type, unsigned name, unsigned off, unsigned size, |
| 2099 | unsigned flags) |
| 2100 | { |
| 2101 | gcc_checking_assert (!(off & (SECTION_ALIGN - 1))); |
| 2102 | if (sectab.pos + sizeof (section) > sectab.size) |
| 2103 | data::simple_memory.grow (obj&: sectab, needed: sectab.pos + sizeof (section), exact: false); |
| 2104 | section *sec = reinterpret_cast<section *> (sectab.buffer + sectab.pos); |
| 2105 | memset (s: sec, c: 0, n: sizeof (section)); |
| 2106 | sec->type = type; |
| 2107 | sec->flags = flags; |
| 2108 | sec->name = name; |
| 2109 | sec->offset = off; |
| 2110 | sec->size = size; |
| 2111 | if (flags & SHF_STRINGS) |
| 2112 | sec->entsize = 1; |
| 2113 | |
| 2114 | unsigned res = sectab.pos; |
| 2115 | sectab.pos += sizeof (section); |
| 2116 | return res / sizeof (section); |
| 2117 | } |
| 2118 | |
| 2119 | /* Pad to the next alignment boundary, then write BUFFER to disk. |
| 2120 | Return the position of the start of the write, or zero on failure. */ |
| 2121 | |
| 2122 | unsigned |
| 2123 | elf_out::write (const data &buffer) |
| 2124 | { |
| 2125 | #if MAPPED_WRITING |
| 2126 | /* HDR is always mapped. */ |
| 2127 | if (&buffer != &hdr) |
| 2128 | { |
| 2129 | bytes_out out (this); |
| 2130 | grow (obj&: out, needed: buffer.pos, exact: true); |
| 2131 | if (out.buffer) |
| 2132 | memcpy (dest: out.buffer, src: buffer.buffer, n: buffer.pos); |
| 2133 | shrink (obj&: out); |
| 2134 | } |
| 2135 | else |
| 2136 | /* We should have been aligned during the first allocation. */ |
| 2137 | gcc_checking_assert (!(pos & (SECTION_ALIGN - 1))); |
| 2138 | #else |
| 2139 | if (::write (fd, buffer.buffer, buffer.pos) != ssize_t (buffer.pos)) |
| 2140 | { |
| 2141 | set_error (errno); |
| 2142 | return 0; |
| 2143 | } |
| 2144 | #endif |
| 2145 | unsigned res = pos; |
| 2146 | pos += buffer.pos; |
| 2147 | |
| 2148 | if (unsigned padding = -pos & (SECTION_ALIGN - 1)) |
| 2149 | { |
| 2150 | #if !MAPPED_WRITING |
| 2151 | /* Align the section on disk, should help the necessary copies. |
| 2152 | fseeking to extend is non-portable. */ |
| 2153 | static char zero[SECTION_ALIGN]; |
| 2154 | if (::write (fd, &zero, padding) != ssize_t (padding)) |
| 2155 | set_error (errno); |
| 2156 | #endif |
| 2157 | pos += padding; |
| 2158 | } |
| 2159 | return res; |
| 2160 | } |
| 2161 | |
| 2162 | /* Write a streaming buffer. It must be using us as an allocator. */ |
| 2163 | |
| 2164 | #if MAPPED_WRITING |
| 2165 | unsigned |
| 2166 | elf_out::write (const bytes_out &buf) |
| 2167 | { |
| 2168 | gcc_checking_assert (buf.memory == this); |
| 2169 | /* A directly mapped buffer. */ |
| 2170 | gcc_checking_assert (buf.buffer - hdr.buffer >= 0 |
| 2171 | && buf.buffer - hdr.buffer + buf.size <= extent); |
| 2172 | unsigned res = pos; |
| 2173 | pos += buf.pos; |
| 2174 | |
| 2175 | /* Align up. We're not going to advance into the next page. */ |
| 2176 | pos += -pos & (SECTION_ALIGN - 1); |
| 2177 | |
| 2178 | return res; |
| 2179 | } |
| 2180 | #endif |
| 2181 | |
| 2182 | /* Write data and add section. STRING_P is true for a string |
| 2183 | section, false for PROGBITS. NAME identifies the section (0 is the |
| 2184 | empty name). DATA is the contents. Return section number or 0 on |
| 2185 | failure (0 is the undef section). */ |
| 2186 | |
| 2187 | unsigned |
| 2188 | elf_out::add (const bytes_out &data, bool string_p, unsigned name) |
| 2189 | { |
| 2190 | unsigned off = write (buf: data); |
| 2191 | |
| 2192 | return add (type: string_p ? SHT_STRTAB : SHT_PROGBITS, name, |
| 2193 | off, size: data.pos, flags: string_p ? SHF_STRINGS : SHF_NONE); |
| 2194 | } |
| 2195 | |
| 2196 | /* Begin writing the file. Initialize the section table and write an |
| 2197 | empty header. Return false on failure. */ |
| 2198 | |
| 2199 | bool |
| 2200 | elf_out::begin () |
| 2201 | { |
| 2202 | if (!parent::begin ()) |
| 2203 | return false; |
| 2204 | |
| 2205 | /* Let the allocators pick a default. */ |
| 2206 | data::simple_memory.grow (obj&: strtab, needed: 0, exact: false); |
| 2207 | data::simple_memory.grow (obj&: sectab, needed: 0, exact: false); |
| 2208 | |
| 2209 | /* The string table starts with an empty string. */ |
| 2210 | name (literal: "" ); |
| 2211 | |
| 2212 | /* Create the UNDEF section. */ |
| 2213 | add (type: SHT_NONE); |
| 2214 | |
| 2215 | #if MAPPED_WRITING |
| 2216 | /* Start a mapping. */ |
| 2217 | create_mapping (EXPERIMENT (page_size, |
| 2218 | (32767 + page_size) & ~(page_size - 1))); |
| 2219 | if (!hdr.buffer) |
| 2220 | return false; |
| 2221 | #endif |
| 2222 | |
| 2223 | /* Write an empty header. */ |
| 2224 | grow (obj&: hdr, needed: sizeof (header), exact: true); |
| 2225 | header *h = reinterpret_cast<header *> (hdr.buffer); |
| 2226 | memset (s: h, c: 0, n: sizeof (header)); |
| 2227 | hdr.pos = hdr.size; |
| 2228 | write (buffer: hdr); |
| 2229 | return !get_error (); |
| 2230 | } |
| 2231 | |
| 2232 | /* Finish writing the file. Write out the string & section tables. |
| 2233 | Fill in the header. Return true on error. */ |
| 2234 | |
| 2235 | bool |
| 2236 | elf_out::end () |
| 2237 | { |
| 2238 | if (fd >= 0) |
| 2239 | { |
| 2240 | /* Write the string table. */ |
| 2241 | unsigned strnam = name (literal: ".strtab" ); |
| 2242 | unsigned stroff = write (buffer: strtab); |
| 2243 | unsigned strndx = add (type: SHT_STRTAB, name: strnam, off: stroff, size: strtab.pos, |
| 2244 | flags: SHF_STRINGS); |
| 2245 | |
| 2246 | /* Store escape values in section[0]. */ |
| 2247 | if (strndx >= SHN_LORESERVE) |
| 2248 | { |
| 2249 | reinterpret_cast<section *> (sectab.buffer)->link = strndx; |
| 2250 | strndx = SHN_XINDEX; |
| 2251 | } |
| 2252 | unsigned shnum = sectab.pos / sizeof (section); |
| 2253 | if (shnum >= SHN_LORESERVE) |
| 2254 | { |
| 2255 | reinterpret_cast<section *> (sectab.buffer)->size = shnum; |
| 2256 | shnum = SHN_XINDEX; |
| 2257 | } |
| 2258 | |
| 2259 | unsigned shoff = write (buffer: sectab); |
| 2260 | |
| 2261 | #if MAPPED_WRITING |
| 2262 | if (offset) |
| 2263 | { |
| 2264 | remove_mapping (); |
| 2265 | offset = 0; |
| 2266 | create_mapping (ext: (sizeof (header) + page_size - 1) & ~(page_size - 1), |
| 2267 | extending: false); |
| 2268 | } |
| 2269 | unsigned length = pos; |
| 2270 | #else |
| 2271 | if (lseek (fd, 0, SEEK_SET) < 0) |
| 2272 | set_error (errno); |
| 2273 | #endif |
| 2274 | /* Write header. */ |
| 2275 | if (!get_error ()) |
| 2276 | { |
| 2277 | /* Write the correct header now. */ |
| 2278 | header *h = reinterpret_cast<header *> (hdr.buffer); |
| 2279 | h->ident.magic[0] = 0x7f; |
| 2280 | h->ident.magic[1] = 'E'; /* Elrond */ |
| 2281 | h->ident.magic[2] = 'L'; /* is an */ |
| 2282 | h->ident.magic[3] = 'F'; /* elf. */ |
| 2283 | h->ident.klass = MY_CLASS; |
| 2284 | h->ident.data = MY_ENDIAN; |
| 2285 | h->ident.version = EV_CURRENT; |
| 2286 | h->ident.osabi = OSABI_NONE; |
| 2287 | h->type = ET_NONE; |
| 2288 | h->machine = EM_NONE; |
| 2289 | h->version = EV_CURRENT; |
| 2290 | h->shoff = shoff; |
| 2291 | h->ehsize = sizeof (header); |
| 2292 | h->shentsize = sizeof (section); |
| 2293 | h->shnum = shnum; |
| 2294 | h->shstrndx = strndx; |
| 2295 | |
| 2296 | pos = 0; |
| 2297 | write (buffer: hdr); |
| 2298 | } |
| 2299 | |
| 2300 | #if MAPPED_WRITING |
| 2301 | remove_mapping (); |
| 2302 | if (ftruncate (fd: fd, length: length)) |
| 2303 | set_error (errno); |
| 2304 | #endif |
| 2305 | } |
| 2306 | |
| 2307 | data::simple_memory.shrink (obj&: sectab); |
| 2308 | data::simple_memory.shrink (obj&: strtab); |
| 2309 | |
| 2310 | return parent::end (); |
| 2311 | } |
| 2312 | |
| 2313 | /********************************************************************/ |
| 2314 | |
| 2315 | /* A dependency set. This is used during stream out to determine the |
| 2316 | connectivity of the graph. Every namespace-scope declaration that |
| 2317 | needs writing has a depset. The depset is filled with the (depsets |
| 2318 | of) declarations within this module that it references. For a |
| 2319 | declaration that'll generally be named types. For definitions |
| 2320 | it'll also be declarations in the body. |
| 2321 | |
| 2322 | From that we can convert the graph to a DAG, via determining the |
| 2323 | Strongly Connected Clusters. Each cluster is streamed |
| 2324 | independently, and thus we achieve lazy loading. |
| 2325 | |
| 2326 | Other decls that get a depset are namespaces themselves and |
| 2327 | unnameable declarations. */ |
| 2328 | |
| 2329 | class depset { |
| 2330 | private: |
| 2331 | tree entity; /* Entity, or containing namespace. */ |
| 2332 | uintptr_t discriminator; /* Flags or identifier. */ |
| 2333 | |
| 2334 | public: |
| 2335 | /* The kinds of entity the depset could describe. The ordering is |
| 2336 | significant, see entity_kind_name. */ |
| 2337 | enum entity_kind |
| 2338 | { |
| 2339 | EK_DECL, /* A decl. */ |
| 2340 | EK_SPECIALIZATION, /* A specialization. */ |
| 2341 | EK_PARTIAL, /* A partial specialization. */ |
| 2342 | EK_USING, /* A using declaration (at namespace scope). */ |
| 2343 | EK_NAMESPACE, /* A namespace. */ |
| 2344 | EK_TU_LOCAL, /* A TU-local decl for ADL. */ |
| 2345 | EK_REDIRECT, /* Redirect to a template_decl. */ |
| 2346 | EK_EXPLICIT_HWM, |
| 2347 | EK_BINDING = EK_EXPLICIT_HWM, /* Implicitly encoded. */ |
| 2348 | EK_FOR_BINDING, /* A decl being inserted for a binding. */ |
| 2349 | EK_INNER_DECL, /* A decl defined outside of its imported |
| 2350 | context. */ |
| 2351 | EK_DIRECT_HWM = EK_PARTIAL + 1, |
| 2352 | |
| 2353 | EK_BITS = 3 /* Only need to encode below EK_EXPLICIT_HWM. */ |
| 2354 | }; |
| 2355 | static_assert (EK_EXPLICIT_HWM < (1u << EK_BITS), |
| 2356 | "not enough bits reserved for entity_kind" ); |
| 2357 | |
| 2358 | private: |
| 2359 | /* Placement of bit fields in discriminator. */ |
| 2360 | enum disc_bits |
| 2361 | { |
| 2362 | DB_ZERO_BIT, /* Set to disambiguate identifier from flags */ |
| 2363 | DB_SPECIAL_BIT, /* First dep slot is special. */ |
| 2364 | DB_KIND_BIT, /* Kind of the entity. */ |
| 2365 | DB_KIND_BITS = EK_BITS, |
| 2366 | DB_DEFN_BIT = DB_KIND_BIT + DB_KIND_BITS, |
| 2367 | DB_IS_PENDING_BIT, /* Is a maybe-pending entity. */ |
| 2368 | DB_TU_LOCAL_BIT, /* Is a TU-local entity. */ |
| 2369 | DB_REF_GLOBAL_BIT, /* Refers to a GMF TU-local entity. */ |
| 2370 | DB_REF_PURVIEW_BIT, /* Refers to a purview TU-local entity. */ |
| 2371 | DB_EXPOSE_GLOBAL_BIT, /* Exposes a GMF TU-local entity. */ |
| 2372 | DB_EXPOSE_PURVIEW_BIT, /* Exposes a purview TU-local entity. */ |
| 2373 | DB_IGNORED_EXPOSURE_BIT, /* Only seen where exposures are ignored. */ |
| 2374 | DB_IMPORTED_BIT, /* An imported entity. */ |
| 2375 | DB_UNREACHED_BIT, /* A yet-to-be reached entity. */ |
| 2376 | DB_MAYBE_RECURSIVE_BIT, /* An entity maybe in a recursive cluster. */ |
| 2377 | DB_ENTRY_BIT, /* The first reached recursive dep. */ |
| 2378 | DB_HIDDEN_BIT, /* A hidden binding. */ |
| 2379 | /* The following bits are not independent, but enumerating them is |
| 2380 | awkward. */ |
| 2381 | DB_TYPE_SPEC_BIT, /* Specialization in the type table. */ |
| 2382 | DB_FRIEND_SPEC_BIT, /* An instantiated template friend. */ |
| 2383 | DB_HWM, |
| 2384 | }; |
| 2385 | static_assert (DB_HWM <= sizeof(discriminator) * CHAR_BIT, |
| 2386 | "not enough bits in discriminator" ); |
| 2387 | |
| 2388 | public: |
| 2389 | /* The first slot is special for EK_SPECIALIZATIONS it is a |
| 2390 | spec_entry pointer. It is not relevant for the SCC |
| 2391 | determination. */ |
| 2392 | vec<depset *> deps; /* Depsets we reference. */ |
| 2393 | |
| 2394 | public: |
| 2395 | unsigned cluster; /* Strongly connected cluster, later entity number */ |
| 2396 | unsigned section; /* Section written to. */ |
| 2397 | /* During SCC construction, section is lowlink, until the depset is |
| 2398 | removed from the stack. See Tarjan algorithm for details. */ |
| 2399 | |
| 2400 | private: |
| 2401 | /* Construction via factories. Destruction via hash traits. */ |
| 2402 | depset (tree entity); |
| 2403 | ~depset (); |
| 2404 | |
| 2405 | public: |
| 2406 | static depset *make_binding (tree, tree); |
| 2407 | static depset *make_entity (tree, entity_kind, bool = false); |
| 2408 | /* Late setting a binding name -- /then/ insert into hash! */ |
| 2409 | inline void set_binding_name (tree name) |
| 2410 | { |
| 2411 | gcc_checking_assert (!get_name ()); |
| 2412 | discriminator = reinterpret_cast<uintptr_t> (name); |
| 2413 | } |
| 2414 | |
| 2415 | private: |
| 2416 | template<unsigned I> void set_flag_bit () |
| 2417 | { |
| 2418 | gcc_checking_assert (I < 2 || !is_binding ()); |
| 2419 | discriminator |= 1u << I; |
| 2420 | } |
| 2421 | template<unsigned I> void clear_flag_bit () |
| 2422 | { |
| 2423 | gcc_checking_assert (I < 2 || !is_binding ()); |
| 2424 | discriminator &= ~(1u << I); |
| 2425 | } |
| 2426 | template<unsigned I> bool get_flag_bit () const |
| 2427 | { |
| 2428 | gcc_checking_assert (I < 2 || !is_binding ()); |
| 2429 | return bool ((discriminator >> I) & 1); |
| 2430 | } |
| 2431 | |
| 2432 | public: |
| 2433 | bool is_binding () const |
| 2434 | { |
| 2435 | return !get_flag_bit<DB_ZERO_BIT> (); |
| 2436 | } |
| 2437 | entity_kind get_entity_kind () const |
| 2438 | { |
| 2439 | if (is_binding ()) |
| 2440 | return EK_BINDING; |
| 2441 | return entity_kind ((discriminator >> DB_KIND_BIT) & ((1u << EK_BITS) - 1)); |
| 2442 | } |
| 2443 | const char *entity_kind_name () const; |
| 2444 | |
| 2445 | public: |
| 2446 | bool has_defn () const |
| 2447 | { |
| 2448 | /* Never consider TU-local entities as having definitions, since |
| 2449 | we will never be accessing them from importers anyway. */ |
| 2450 | return get_flag_bit<DB_DEFN_BIT> () && !is_tu_local (); |
| 2451 | } |
| 2452 | |
| 2453 | public: |
| 2454 | /* This entity might be found other than by namespace-scope lookup; |
| 2455 | see module_state::write_pendings for more details. */ |
| 2456 | bool is_pending_entity () const |
| 2457 | { |
| 2458 | return (get_entity_kind () == EK_SPECIALIZATION |
| 2459 | || get_entity_kind () == EK_PARTIAL |
| 2460 | || (get_entity_kind () == EK_DECL |
| 2461 | && get_flag_bit<DB_IS_PENDING_BIT> ())); |
| 2462 | } |
| 2463 | |
| 2464 | public: |
| 2465 | /* Only consider global module entities as being TU-local |
| 2466 | when STRICT is set; otherwise, as an extension we support |
| 2467 | emitting declarations referencing TU-local GMF entities |
| 2468 | (and only check purview entities), to assist in migration. */ |
| 2469 | bool is_tu_local (bool strict = false) const |
| 2470 | { |
| 2471 | /* Non-strict is only intended for migration purposes, so |
| 2472 | for simplicity's sake we only care about whether this is |
| 2473 | a non-purview variable or function at namespace scope; |
| 2474 | these are the most common cases (coming from C), and |
| 2475 | that way we don't have to care about diagnostics for |
| 2476 | nested types and so forth. */ |
| 2477 | tree inner = STRIP_TEMPLATE (get_entity ()); |
| 2478 | return (get_flag_bit<DB_TU_LOCAL_BIT> () |
| 2479 | && (strict |
| 2480 | || !VAR_OR_FUNCTION_DECL_P (inner) |
| 2481 | || !NAMESPACE_SCOPE_P (inner) |
| 2482 | || (DECL_LANG_SPECIFIC (inner) |
| 2483 | && DECL_MODULE_PURVIEW_P (inner)))); |
| 2484 | } |
| 2485 | bool refs_tu_local (bool strict = false) const |
| 2486 | { |
| 2487 | return (get_flag_bit<DB_REF_PURVIEW_BIT> () |
| 2488 | || (strict && get_flag_bit <DB_REF_GLOBAL_BIT> ())); |
| 2489 | } |
| 2490 | bool is_exposure (bool strict = false) const |
| 2491 | { |
| 2492 | return (get_flag_bit<DB_EXPOSE_PURVIEW_BIT> () |
| 2493 | || (strict && get_flag_bit <DB_EXPOSE_GLOBAL_BIT> ())); |
| 2494 | } |
| 2495 | bool is_ignored_exposure_context () const |
| 2496 | { |
| 2497 | return get_flag_bit<DB_IGNORED_EXPOSURE_BIT> (); |
| 2498 | } |
| 2499 | |
| 2500 | public: |
| 2501 | bool is_import () const |
| 2502 | { |
| 2503 | return get_flag_bit<DB_IMPORTED_BIT> (); |
| 2504 | } |
| 2505 | bool is_unreached () const |
| 2506 | { |
| 2507 | return get_flag_bit<DB_UNREACHED_BIT> (); |
| 2508 | } |
| 2509 | bool is_hidden () const |
| 2510 | { |
| 2511 | return get_flag_bit<DB_HIDDEN_BIT> (); |
| 2512 | } |
| 2513 | bool is_maybe_recursive () const |
| 2514 | { |
| 2515 | return get_flag_bit<DB_MAYBE_RECURSIVE_BIT> (); |
| 2516 | } |
| 2517 | bool is_entry () const |
| 2518 | { |
| 2519 | return get_flag_bit<DB_ENTRY_BIT> (); |
| 2520 | } |
| 2521 | bool is_type_spec () const |
| 2522 | { |
| 2523 | return get_flag_bit<DB_TYPE_SPEC_BIT> (); |
| 2524 | } |
| 2525 | bool is_friend_spec () const |
| 2526 | { |
| 2527 | return get_flag_bit<DB_FRIEND_SPEC_BIT> (); |
| 2528 | } |
| 2529 | |
| 2530 | public: |
| 2531 | /* We set these bit outside of depset. */ |
| 2532 | void set_hidden_binding () |
| 2533 | { |
| 2534 | set_flag_bit<DB_HIDDEN_BIT> (); |
| 2535 | } |
| 2536 | void clear_hidden_binding () |
| 2537 | { |
| 2538 | clear_flag_bit<DB_HIDDEN_BIT> (); |
| 2539 | } |
| 2540 | |
| 2541 | public: |
| 2542 | bool is_special () const |
| 2543 | { |
| 2544 | return get_flag_bit<DB_SPECIAL_BIT> (); |
| 2545 | } |
| 2546 | void set_special () |
| 2547 | { |
| 2548 | set_flag_bit<DB_SPECIAL_BIT> (); |
| 2549 | } |
| 2550 | |
| 2551 | public: |
| 2552 | tree get_entity () const |
| 2553 | { |
| 2554 | return entity; |
| 2555 | } |
| 2556 | tree get_name () const |
| 2557 | { |
| 2558 | gcc_checking_assert (is_binding ()); |
| 2559 | return reinterpret_cast <tree> (discriminator); |
| 2560 | } |
| 2561 | |
| 2562 | public: |
| 2563 | /* Traits for a hash table of pointers to bindings. */ |
| 2564 | struct traits { |
| 2565 | /* Each entry is a pointer to a depset. */ |
| 2566 | typedef depset *value_type; |
| 2567 | /* We lookup by container:maybe-identifier pair. */ |
| 2568 | typedef std::pair<tree,tree> compare_type; |
| 2569 | |
| 2570 | static const bool empty_zero_p = true; |
| 2571 | |
| 2572 | /* hash and equality for compare_type. */ |
| 2573 | inline static hashval_t hash (const compare_type &p) |
| 2574 | { |
| 2575 | hashval_t h = pointer_hash<tree_node>::hash (candidate: p.first); |
| 2576 | if (p.second) |
| 2577 | { |
| 2578 | hashval_t nh = IDENTIFIER_HASH_VALUE (p.second); |
| 2579 | h = iterative_hash_hashval_t (val: h, val2: nh); |
| 2580 | } |
| 2581 | return h; |
| 2582 | } |
| 2583 | inline static bool equal (const value_type b, const compare_type &p) |
| 2584 | { |
| 2585 | if (b->entity != p.first) |
| 2586 | return false; |
| 2587 | |
| 2588 | if (p.second) |
| 2589 | return b->discriminator == reinterpret_cast<uintptr_t> (p.second); |
| 2590 | else |
| 2591 | return !b->is_binding (); |
| 2592 | } |
| 2593 | |
| 2594 | /* (re)hasher for a binding itself. */ |
| 2595 | inline static hashval_t hash (const value_type b) |
| 2596 | { |
| 2597 | hashval_t h = pointer_hash<tree_node>::hash (candidate: b->entity); |
| 2598 | if (b->is_binding ()) |
| 2599 | { |
| 2600 | hashval_t nh = IDENTIFIER_HASH_VALUE (b->get_name ()); |
| 2601 | h = iterative_hash_hashval_t (val: h, val2: nh); |
| 2602 | } |
| 2603 | return h; |
| 2604 | } |
| 2605 | |
| 2606 | /* Empty via NULL. */ |
| 2607 | static inline void mark_empty (value_type &p) {p = NULL;} |
| 2608 | static inline bool is_empty (value_type p) {return !p;} |
| 2609 | |
| 2610 | /* Nothing is deletable. Everything is insertable. */ |
| 2611 | static bool is_deleted (value_type) { return false; } |
| 2612 | static void mark_deleted (value_type) { gcc_unreachable (); } |
| 2613 | |
| 2614 | /* We own the entities in the hash table. */ |
| 2615 | static void remove (value_type p) |
| 2616 | { |
| 2617 | delete (p); |
| 2618 | } |
| 2619 | }; |
| 2620 | |
| 2621 | public: |
| 2622 | class hash : public hash_table<traits> { |
| 2623 | typedef traits::compare_type key_t; |
| 2624 | typedef hash_table<traits> parent; |
| 2625 | |
| 2626 | public: |
| 2627 | vec<depset *> worklist; /* Worklist of decls to walk. */ |
| 2628 | hash *chain; /* Original table. */ |
| 2629 | depset *current; /* Current depset being depended. */ |
| 2630 | unsigned section; /* When writing out, the section. */ |
| 2631 | bool reached_unreached; /* We reached an unreached entity. */ |
| 2632 | bool writing_merge_key; /* We're writing merge key information. */ |
| 2633 | |
| 2634 | private: |
| 2635 | bool ignore_exposure; /* In a context where referencing a TU-local |
| 2636 | entity is not an exposure. */ |
| 2637 | |
| 2638 | private: |
| 2639 | /* Information needed to do dependent ADL for discovering |
| 2640 | more decl-reachable entities. Cached during walking to |
| 2641 | prevent tree marking from interfering with lookup. */ |
| 2642 | struct dep_adl_info { |
| 2643 | /* The name of the call or operator. */ |
| 2644 | tree name = NULL_TREE; |
| 2645 | /* If not ERROR_MARK, a rewrite candidate for this operator. */ |
| 2646 | tree_code rewrite = ERROR_MARK; |
| 2647 | /* Argument list for the call. */ |
| 2648 | vec<tree, va_gc>* args = make_tree_vector (); |
| 2649 | }; |
| 2650 | vec<dep_adl_info> dep_adl_entity_list; |
| 2651 | |
| 2652 | public: |
| 2653 | hash (size_t size, hash *c = NULL) |
| 2654 | : parent (size), chain (c), current (NULL), section (0), |
| 2655 | reached_unreached (false), writing_merge_key (false), |
| 2656 | ignore_exposure (false) |
| 2657 | { |
| 2658 | worklist.create (nelems: size); |
| 2659 | dep_adl_entity_list.create (nelems: 16); |
| 2660 | } |
| 2661 | ~hash () |
| 2662 | { |
| 2663 | worklist.release (); |
| 2664 | dep_adl_entity_list.release (); |
| 2665 | } |
| 2666 | |
| 2667 | public: |
| 2668 | bool is_key_order () const |
| 2669 | { |
| 2670 | return chain != NULL; |
| 2671 | } |
| 2672 | |
| 2673 | public: |
| 2674 | /* Returns a temporary override that will additionally consider this |
| 2675 | to be a context where exposures of TU-local entities are ignored |
| 2676 | if COND is true. */ |
| 2677 | temp_override<bool> ignore_exposure_if (bool cond) |
| 2678 | { |
| 2679 | return make_temp_override (var&: ignore_exposure, overrider: ignore_exposure || cond); |
| 2680 | } |
| 2681 | |
| 2682 | private: |
| 2683 | depset **entity_slot (tree entity, bool = true); |
| 2684 | depset **binding_slot (tree ctx, tree name, bool = true); |
| 2685 | depset *maybe_add_declaration (tree decl); |
| 2686 | |
| 2687 | public: |
| 2688 | depset *find_dependency (tree entity); |
| 2689 | depset *find_binding (tree ctx, tree name); |
| 2690 | depset *make_dependency (tree decl, entity_kind); |
| 2691 | void add_dependency (depset *); |
| 2692 | |
| 2693 | public: |
| 2694 | void add_mergeable (depset *); |
| 2695 | depset *add_dependency (tree decl, entity_kind); |
| 2696 | void add_namespace_context (depset *, tree ns); |
| 2697 | |
| 2698 | private: |
| 2699 | static bool add_binding_entity (tree, WMB_Flags, void *); |
| 2700 | |
| 2701 | public: |
| 2702 | bool add_namespace_entities (tree ns, bitmap partitions); |
| 2703 | void add_specializations (bool decl_p); |
| 2704 | void add_partial_entities (vec<tree, va_gc> *); |
| 2705 | void add_class_entities (vec<tree, va_gc> *); |
| 2706 | void add_dependent_adl_entities (tree expr); |
| 2707 | |
| 2708 | private: |
| 2709 | void add_deduction_guides (tree decl); |
| 2710 | |
| 2711 | public: |
| 2712 | void find_dependencies (module_state *); |
| 2713 | bool finalize_dependencies (); |
| 2714 | vec<depset *> connect (); |
| 2715 | |
| 2716 | private: |
| 2717 | bool diagnose_bad_internal_ref (depset *dep, bool strict = false); |
| 2718 | bool diagnose_template_names_tu_local (depset *dep, bool strict = false); |
| 2719 | }; |
| 2720 | |
| 2721 | public: |
| 2722 | struct tarjan { |
| 2723 | vec<depset *> result; |
| 2724 | vec<depset *> stack; |
| 2725 | unsigned index; |
| 2726 | |
| 2727 | tarjan (unsigned size) |
| 2728 | : index (0) |
| 2729 | { |
| 2730 | result.create (nelems: size); |
| 2731 | stack.create (nelems: 50); |
| 2732 | } |
| 2733 | ~tarjan () |
| 2734 | { |
| 2735 | gcc_assert (!stack.length ()); |
| 2736 | stack.release (); |
| 2737 | } |
| 2738 | |
| 2739 | public: |
| 2740 | void connect (depset *); |
| 2741 | }; |
| 2742 | }; |
| 2743 | |
| 2744 | inline |
| 2745 | depset::depset (tree entity) |
| 2746 | :entity (entity), discriminator (0), cluster (0), section (0) |
| 2747 | { |
| 2748 | deps.create (nelems: 0); |
| 2749 | } |
| 2750 | |
/* Destroy a depset, releasing its dependency vector.  */
inline
depset::~depset ()
{
  deps.release ();
}
| 2756 | |
| 2757 | const char * |
| 2758 | depset::entity_kind_name () const |
| 2759 | { |
| 2760 | /* Same order as entity_kind. */ |
| 2761 | static const char *const names[] = |
| 2762 | {"decl" , "specialization" , "partial" , "using" , |
| 2763 | "namespace" , "tu-local" , "redirect" , "binding" }; |
| 2764 | static_assert (ARRAY_SIZE (names) == EK_EXPLICIT_HWM + 1, |
| 2765 | "names must have an entry for every explicit entity_kind" ); |
| 2766 | entity_kind kind = get_entity_kind (); |
| 2767 | gcc_checking_assert (kind < ARRAY_SIZE (names)); |
| 2768 | return names[kind]; |
| 2769 | } |
| 2770 | |
| 2771 | /* Create a depset for a namespace binding NS::NAME. */ |
| 2772 | |
| 2773 | depset *depset::make_binding (tree ns, tree name) |
| 2774 | { |
| 2775 | depset *binding = new depset (ns); |
| 2776 | |
| 2777 | binding->discriminator = reinterpret_cast <uintptr_t> (name); |
| 2778 | |
| 2779 | return binding; |
| 2780 | } |
| 2781 | |
| 2782 | depset *depset::make_entity (tree entity, entity_kind ek, bool is_defn) |
| 2783 | { |
| 2784 | depset *r = new depset (entity); |
| 2785 | |
| 2786 | r->discriminator = ((1 << DB_ZERO_BIT) |
| 2787 | | (ek << DB_KIND_BIT) |
| 2788 | | is_defn << DB_DEFN_BIT); |
| 2789 | |
| 2790 | return r; |
| 2791 | } |
| 2792 | |
/* Key for the pending-entity table: a namespace-scope identifier
   NS::ID to which not-yet-loaded entities are keyed.  */
class pending_key
{
public:
  tree ns;  /* Containing namespace.  */
  tree id;  /* Identifier within NS.  */
};
| 2799 | |
| 2800 | template<> |
| 2801 | struct default_hash_traits<pending_key> |
| 2802 | { |
| 2803 | using value_type = pending_key; |
| 2804 | |
| 2805 | static const bool empty_zero_p = false; |
| 2806 | static hashval_t hash (const value_type &k) |
| 2807 | { |
| 2808 | hashval_t h = IDENTIFIER_HASH_VALUE (k.id); |
| 2809 | h = iterative_hash_hashval_t (DECL_UID (k.ns), val2: h); |
| 2810 | |
| 2811 | return h; |
| 2812 | } |
| 2813 | static bool equal (const value_type &k, const value_type &l) |
| 2814 | { |
| 2815 | return k.ns == l.ns && k.id == l.id; |
| 2816 | } |
| 2817 | static void mark_empty (value_type &k) |
| 2818 | { |
| 2819 | k.ns = k.id = NULL_TREE; |
| 2820 | } |
| 2821 | static void mark_deleted (value_type &k) |
| 2822 | { |
| 2823 | k.ns = NULL_TREE; |
| 2824 | gcc_checking_assert (k.id); |
| 2825 | } |
| 2826 | static bool is_empty (const value_type &k) |
| 2827 | { |
| 2828 | return k.ns == NULL_TREE && k.id == NULL_TREE; |
| 2829 | } |
| 2830 | static bool is_deleted (const value_type &k) |
| 2831 | { |
| 2832 | return k.ns == NULL_TREE && k.id != NULL_TREE; |
| 2833 | } |
| 2834 | static void remove (value_type &) |
| 2835 | { |
| 2836 | } |
| 2837 | }; |
| 2838 | |
/* Map from a namespace-scope key to the pending entities keyed by
   it (presumably entity indices — confirm against the users of
   pending_table).  */
typedef hash_map<pending_key, auto_vec<unsigned>> pending_map_t;

/* Not-loaded entities that are keyed to a namespace-scope
   identifier.  See module_state::write_pendings for details.  */
pending_map_t *pending_table;

/* Decls that need some post processing once a batch of lazy loads has
   completed.  */
vec<tree, va_heap, vl_embed> *post_load_decls;

/* Some entities are keyed to another entity for ODR purposes.
   For example, at namespace scope, 'inline auto var = []{};', that
   lambda is keyed to 'var', and follows its ODRness.  */
typedef hash_map<tree, auto_vec<tree>> keyed_map_t;
static keyed_map_t *keyed_table;

/* Forward declaration; defined later in this file.  */
static tree get_keyed_decl_scope (tree);

/* Instantiations of temploid friends imported from another module
   need to be attached to the same module as the temploid.  This maps
   these decls to the temploid they are instantiated from, as there is
   no other easy way to get this information.  */
static GTY((cache)) decl_tree_cache_map *imported_temploid_friends;
| 2862 | |
| 2863 | /********************************************************************/ |
| 2864 | /* Tree streaming. The tree streaming is very specific to the tree |
| 2865 | structures themselves. A tag indicates the kind of tree being |
| 2866 | streamed. -ve tags indicate backreferences to already-streamed |
| 2867 | trees. Backreferences are auto-numbered. */ |
| 2868 | |
| 2869 | /* Tree tags. */ |
| 2870 | enum tree_tag { |
| 2871 | tt_null, /* NULL_TREE. */ |
| 2872 | tt_tu_local, /* A TU-local entity. */ |
| 2873 | tt_fixed, /* Fixed vector index. */ |
| 2874 | |
| 2875 | tt_node, /* By-value node. */ |
| 2876 | tt_decl, /* By-value mergeable decl. */ |
| 2877 | tt_tpl_parm, /* Template parm. */ |
| 2878 | |
| 2879 | /* The ordering of the following 5 is relied upon in |
| 2880 | trees_out::tree_node. */ |
| 2881 | tt_id, /* Identifier node. */ |
| 2882 | tt_conv_id, /* Conversion operator name. */ |
| 2883 | tt_anon_id, /* Anonymous name. */ |
| 2884 | tt_lambda_id, /* Lambda name. */ |
| 2885 | tt_internal_id, /* Internal name. */ |
| 2886 | |
| 2887 | tt_typedef_type, /* A (possibly implicit) typedefed type. */ |
| 2888 | tt_derived_type, /* A type derived from another type. */ |
| 2889 | tt_variant_type, /* A variant of another type. */ |
| 2890 | |
| 2891 | tt_tinfo_var, /* Typeinfo object. */ |
| 2892 | tt_tinfo_typedef, /* Typeinfo typedef. */ |
| 2893 | tt_ptrmem_type, /* Pointer to member type. */ |
| 2894 | tt_nttp_var, /* NTTP_OBJECT VAR_DECL. */ |
| 2895 | |
| 2896 | tt_parm, /* Function parameter or result. */ |
| 2897 | tt_enum_value, /* An enum value. */ |
| 2898 | tt_enum_decl, /* An enum decl. */ |
| 2899 | tt_data_member, /* Data member/using-decl. */ |
| 2900 | |
| 2901 | tt_binfo, /* A BINFO. */ |
| 2902 | tt_vtable, /* A vtable. */ |
| 2903 | tt_thunk, /* A thunk. */ |
| 2904 | tt_clone_ref, |
| 2905 | |
| 2906 | tt_entity, /* A extra-cluster entity. */ |
| 2907 | |
| 2908 | tt_template, /* The TEMPLATE_RESULT of a template. */ |
| 2909 | }; |
| 2910 | |
/* How to walk a tree node when streaming it out.  */
enum walk_kind {
  WK_none,	/* No walk to do (a back- or fixed-ref happened).  */
  WK_normal,	/* Normal walk (by-name if possible).  */

  WK_value,	/* By-value walk.  */
};
| 2917 | |
/* How a streamed decl is located for merging with an existing
   declaration on read-in.  The values and masks are part of the
   stream format; do not renumber.  */
enum merge_kind
{
  MK_unique,		/* Known unique.  */
  MK_named,		/* Found by CTX, NAME + maybe_arg types etc.  */
  MK_field,		/* Found by CTX and index on TYPE_FIELDS  */
  MK_vtable,		/* Found by CTX and index on TYPE_VTABLES  */
  MK_as_base,		/* Found by CTX.  */

  MK_partial,		/* A partial specialization.  */

  MK_enum,		/* Found by CTX, & 1stMemberNAME.  */
  MK_keyed,		/* Found by key & index.  */
  MK_local_type,	/* Found by CTX, index.  */

  MK_friend_spec,	/* Like named, but has a tmpl & args too.  */
  MK_local_friend,	/* Found by CTX, index.  */

  /* Low water mark of kinds that locate their decl indirectly via
     another entity.  */
  MK_indirect_lwm = MK_enum,

  /* Template specialization kinds below. These are all found via
     primary template and specialization args.  */
  MK_template_mask = 0x10,  /* A template specialization.  */

  MK_tmpl_decl_mask = 0x4,  /* In decl table.  */

  MK_tmpl_tmpl_mask = 0x1,  /* We want TEMPLATE_DECL.  */

  MK_type_spec = MK_template_mask,
  MK_decl_spec = MK_template_mask | MK_tmpl_decl_mask,

  MK_hwm = 0x20		/* High water mark; sizes merge_kind_name.  */
};
/* This is more than a debugging array.  NULLs are used to determine
   an invalid merge_kind number.  Same order as merge_kind; note
   index 7 is MK_keyed, whose historical name "attached" is kept
   here.  */
static char const *const merge_kind_name[MK_hwm] =
  {
    "unique" , "named" , "field" , "vtable" , /* 0...3  */
    "asbase" , "partial" , "enum" , "attached" , /* 4...7  */

    "local type" , "friend spec" , "local friend" , NULL, /* 8...11  */
    NULL, NULL, NULL, NULL,

    "type spec" , "type tmpl spec" , /* 16,17 type (template).  */
    NULL, NULL,

    "decl spec" , "decl tmpl spec" , /* 20,21 decl (template).  */
    NULL, NULL,
    NULL, NULL, NULL, NULL,
    NULL, NULL, NULL, NULL,
  };
| 2968 | |
/* Mergeable entity location data.  */
struct merge_key {
  cp_ref_qualifier ref_q : 2;	/* Ref-qualifier.  */
  unsigned coro_disc : 2;  /* Discriminator for coroutine transforms.  */
  unsigned index;	/* Index, for the index-based merge kinds
			   (presumably MK_field/MK_vtable/MK_local_* —
			   confirm against key_mergeable).  */

  tree ret;  /* Return type, if appropriate.  */
  tree args; /* Arg types, if appropriate.  */

  tree constraints;  /* Constraints.  */

  /* Default to an empty key: no ref-qualifier, zero index, no
     types.  */
  merge_key ()
    :ref_q (REF_QUAL_NONE), coro_disc (0), index (0),
     ret (NULL_TREE), args (NULL_TREE),
     constraints (NULL_TREE)
  {
  }
};
| 2987 | |
/* Hashmap of merged duplicates.  Usually decls, but can contain
   BINFOs.  The mapped uintptr_t is a tree pointer whose low bit
   marks an already-diagnosed mismatch (see
   trees_in::unmatched_duplicate / trees_in::maybe_duplicate).  */
typedef hash_map<tree,uintptr_t,
		 simple_hashmap_traits<nodel_ptr_hash<tree_node>,uintptr_t> >
duplicate_hash_map;

/* Data needed for post-processing.  */
struct post_process_data {
  tree decl;			/* The decl to post process.  */
  location_t start_locus;	/* Start location (presumably of the
				   definition — confirm against
				   read_function_def).  */
  location_t end_locus;		/* Corresponding end location.  */
  /* Properties recorded for the decl's body; assumed to mirror the
     per-function flags tracked during parsing — confirm at the use
     sites.  */
  bool returns_value;
  bool returns_null;
  bool returns_abnormally;
  bool infinite_loop;
};
| 3004 | |
/* Tree stream reader.  Note that reading a stream doesn't mark the
   read trees with TREE_VISITED.  Thus it's quite safe to have
   multiple concurrent readers.  Which is good, because lazy
   loading.

   It's important that trees_in/out have internal linkage so that the
   compiler knows core_bools, lang_type_bools and lang_decl_bools have
   only a single caller (tree_node_bools) and inlines them appropriately.  */
namespace {
class trees_in : public bytes_in {
  typedef bytes_in parent;

private:
  module_state *state;		/* Module being imported.  */
  vec<tree> back_refs;		/* Back references.  */
  duplicate_hash_map *duplicates;	/* Map from existings to duplicate.  */
  vec<post_process_data> post_decls;	/* Decls to post process.  */
  vec<tree> post_types;		/* Types to post process.  */
  unsigned unused;		/* Inhibit any interior TREE_USED
				   marking.  */

public:
  trees_in (module_state *);
  ~trees_in ();

public:
  /* Back-reference management for already-read trees.  */
  int insert (tree);
  tree back_ref (int);

private:
  tree start (unsigned = 0);

public:
  /* Needed for binfo writing  */
  bool core_bools (tree, bits_in&);

private:
  /* Stream tree_core, lang_decl_specific and lang_type_specific
     bits.  */
  bool core_vals (tree);
  bool lang_type_bools (tree, bits_in&);
  bool lang_type_vals (tree);
  bool lang_decl_bools (tree, bits_in&);
  bool lang_decl_vals (tree);
  bool lang_vals (tree);
  bool tree_node_bools (tree);
  bool tree_node_vals (tree);
  tree tree_value ();
  tree decl_value ();
  tree tpl_parm_value ();

private:
  tree chained_decls ();  /* Follow DECL_CHAIN.  */
  vec<tree, va_heap> *vec_chained_decls ();
  vec<tree, va_gc> *tree_vec (); /* vec of tree.  */
  vec<tree_pair_s, va_gc> *tree_pair_vec (); /* vec of tree_pair.  */
  tree tree_list (bool has_purpose);

public:
  /* Read a tree node.  */
  tree tree_node (bool is_use = false);

private:
  bool install_entity (tree decl);
  tree tpl_parms (unsigned &tpl_levels);
  bool tpl_parms_fini (tree decl, unsigned tpl_levels);
  bool tpl_header (tree decl, unsigned *tpl_levels);
  int fn_parms_init (tree);
  void fn_parms_fini (int tag, tree fn, tree existing, bool has_defn);
  unsigned add_indirect_tpl_parms (tree);
public:
  bool add_indirects (tree);

public:
  /* Serialize various definitions.  */
  bool read_definition (tree decl);

private:
  void check_abi_tags (tree existing, tree decl, tree &eattr, tree &dattr);
  bool is_matching_decl (tree existing, tree decl, bool is_typedef);
  static bool install_implicit_member (tree decl);
  bool read_function_def (tree decl, tree maybe_template);
  bool read_var_def (tree decl, tree maybe_template);
  bool read_class_def (tree decl, tree maybe_template);
  bool read_enum_def (tree decl, tree maybe_template);

public:
  tree decl_container ();
  tree key_mergeable (int tag, merge_kind, tree decl, tree inner, tree type,
		      tree container, bool is_attached,
		      bool is_imported_temploid_friend);
  unsigned binfo_mergeable (tree *);

private:
  tree key_local_type (const merge_key&, tree, tree);
  uintptr_t *find_duplicate (tree existing);
  void register_duplicate (tree decl, tree existing);
  /* Mark as an already diagnosed bad duplicate.  */
  void unmatched_duplicate (tree existing)
  {
    *find_duplicate (existing) |= 1;
  }

public:
  bool is_duplicate (tree decl)
  {
    return find_duplicate (existing: decl) != NULL;
  }
  /* Return the existing decl DECL duplicates, stripping the
     'already diagnosed' low bit; DECL itself if not a duplicate.  */
  tree maybe_duplicate (tree decl)
  {
    if (uintptr_t *dup = find_duplicate (existing: decl))
      return reinterpret_cast<tree> (*dup & ~uintptr_t (1));
    return decl;
  }
  tree odr_duplicate (tree decl, bool has_defn);

public:
  /* Return the decls to postprocess.  */
  const vec<post_process_data>& post_process ()
  {
    return post_decls;
  }
  /* Return the types to postprocess.  */
  const vec<tree>& post_process_type ()
  {
    return post_types;
  }
private:
  /* Register DATA for postprocessing.  */
  void post_process (post_process_data data)
  {
    post_decls.safe_push (obj: data);
  }
  /* Register TYPE for postprocessing.  */
  void post_process_type (tree type)
  {
    gcc_checking_assert (TYPE_P (type));
    post_types.safe_push (obj: type);
  }

private:
  void assert_definition (tree, bool installing);
};
} // anon namespace
| 3149 | |
| 3150 | trees_in::trees_in (module_state *state) |
| 3151 | :parent (), state (state), unused (0) |
| 3152 | { |
| 3153 | duplicates = NULL; |
| 3154 | back_refs.create (nelems: 500); |
| 3155 | post_decls.create (nelems: 0); |
| 3156 | post_types.create (nelems: 0); |
| 3157 | } |
| 3158 | |
| 3159 | trees_in::~trees_in () |
| 3160 | { |
| 3161 | delete (duplicates); |
| 3162 | back_refs.release (); |
| 3163 | post_decls.release (); |
| 3164 | post_types.release (); |
| 3165 | } |
| 3166 | |
/* Tree stream writer.  */
namespace {
class trees_out : public bytes_out {
  typedef bytes_out parent;

private:
  module_state *state;		/* The module we are writing.  */
  ptr_int_hash_map tree_map;	/* Trees to references  */
  depset::hash *dep_hash;	/* Dependency table.  */
  int ref_num;			/* Back reference number.  */
  unsigned section;		/* Section being written.  */
  bool writing_local_entities;	/* Whether we might walk into a TU-local
				   entity we need to emit placeholders for.  */
  bool walking_bit_field_unit;	/* Whether we're walking the underlying
				   storage for a bit field.  There's no other
				   great way to detect this.  */
#if CHECKING_P
  int importedness;		/* Checker that imports not occurring
				   inappropriately.  +ve imports ok,
				   -ve imports not ok.  */
#endif

public:
  trees_out (allocator *, module_state *, depset::hash &deps, unsigned sec = 0);
  ~trees_out ();

private:
  void mark_trees ();
  void unmark_trees ();

public:
  /* Hey, let's ignore the well known STL iterator idiom.  */
  void begin ();
  unsigned end (elf_out *sink, unsigned name, unsigned *crc_ptr);
  void end ();

public:
  enum tags
  {
    tag_backref = -1,	/* Upper bound on the backrefs.  */
    tag_value = 0,	/* Write by value.  */
    tag_fixed		/* Lower bound on the fixed trees.  */
  };

public:
  /* The walk is used for three similar purposes:

      1. The initial scan for dependencies.
      2. Once dependencies have been found, ordering them.
      3. Writing dependencies to file (streaming_p).

     For cases where it matters, these accessers can be used to determine
     which state we're in.  */
  bool is_initial_scan () const
  {
    return !streaming_p () && !is_key_order ();
  }
  bool is_key_order () const
  {
    return dep_hash->is_key_order ();
  }

public:
  int insert (tree, walk_kind = WK_normal);

private:
  void start (tree, bool = false);

private:
  walk_kind ref_node (tree);
public:
  int get_tag (tree);
  void set_importing (int i ATTRIBUTE_UNUSED)
  {
#if CHECKING_P
    importedness = i;
#endif
  }

private:
  /* Stream tree_core, lang_decl_specific and lang_type_specific
     bits (mirrors trees_in).  */
  void core_bools (tree, bits_out&);
  void core_vals (tree);
  void lang_type_bools (tree, bits_out&);
  void lang_type_vals (tree);
  void lang_decl_bools (tree, bits_out&);
  void lang_decl_vals (tree);
  void lang_vals (tree);
  void tree_node_bools (tree);
  void tree_node_vals (tree);

private:
  void chained_decls (tree);
  void vec_chained_decls (tree);
  void tree_vec (vec<tree, va_gc> *);
  void tree_pair_vec (vec<tree_pair_s, va_gc> *);
  void tree_list (tree, bool has_purpose);

private:
  bool has_tu_local_dep (tree) const;
  tree find_tu_local_decl (tree);

public:
  /* Mark a node for by-value walking.  */
  void mark_by_value (tree);

public:
  void tree_node (tree);

private:
  void install_entity (tree decl, depset *);
  void tpl_parms (tree parms, unsigned &tpl_levels);
  void tpl_parms_fini (tree decl, unsigned tpl_levels);
  void fn_parms_fini (tree) {}
  unsigned add_indirect_tpl_parms (tree);
public:
  void add_indirects (tree);
  void fn_parms_init (tree);
  void tpl_header (tree decl, unsigned *tpl_levels);

public:
  merge_kind get_merge_kind (tree decl, depset *maybe_dep);
  tree decl_container (tree decl);
  void key_mergeable (int tag, merge_kind, tree decl, tree inner,
		      tree container, depset *maybe_dep);
  void binfo_mergeable (tree binfo);

private:
  void key_local_type (merge_key&, tree, tree);
  bool decl_node (tree, walk_kind ref);
  void type_node (tree);
  void tree_value (tree);
  void tpl_parm_value (tree);

public:
  void decl_value (tree, depset *);

public:
  /* Serialize various definitions.  */
  void write_definition (tree decl, bool refs_tu_local = false);
  void mark_declaration (tree decl, bool do_defn);

private:
  void mark_function_def (tree decl);
  void mark_var_def (tree decl);
  void mark_class_def (tree decl);
  void mark_enum_def (tree decl);
  void mark_class_member (tree decl, bool do_defn = true);
  void mark_binfos (tree type);

private:
  void write_var_def (tree decl);
  void write_function_def (tree decl);
  void write_class_def (tree decl);
  void write_enum_def (tree decl);

private:
  static void assert_definition (tree);

public:
  static void instrument ();

private:
  /* Tree instrumentation.  */
  static unsigned tree_val_count;
  static unsigned decl_val_count;
  static unsigned back_ref_count;
  static unsigned tu_local_count;
  static unsigned null_count;
};
} // anon namespace
| 3337 | |
/* Instrumentation counters.  Definitions of the statics declared in
   trees_out; zero-initialized.  */
unsigned trees_out::tree_val_count;
unsigned trees_out::decl_val_count;
unsigned trees_out::back_ref_count;
unsigned trees_out::tu_local_count;
unsigned trees_out::null_count;
| 3344 | |
/* Construct a writer for module STATE, recording dependencies into
   DEPS, writing section SECTION.  */

trees_out::trees_out (allocator *mem, module_state *state, depset::hash &deps,
		      unsigned section)
  :parent (mem), state (state), tree_map (500),
   dep_hash (&deps), ref_num (0), section (section),
   writing_local_entities (false), walking_bit_field_unit (false)
{
#if CHECKING_P
  importedness = 0;
#endif
}
| 3355 | |
/* Nothing to explicitly release; members clean themselves up.  */

trees_out::~trees_out ()
{
}
| 3359 | |
| 3360 | /********************************************************************/ |
| 3361 | /* Location. We're aware of the line-map concept and reproduce it |
| 3362 | here. Each imported module allocates a contiguous span of ordinary |
| 3363 | maps, and of macro maps. adhoc maps are serialized by contents, |
| 3364 | not pre-allocated. The scattered linemaps of a module are |
| 3365 | coalesced when writing. */ |
| 3366 | |
| 3367 | |
/* I use half-open [first,second) ranges.  */
typedef std::pair<line_map_uint_t,line_map_uint_t> range_t;

/* A (half-open) range of locations.  */
typedef std::pair<location_t,location_t> loc_range_t;
| 3373 | |
| 3374 | /* Spans of the line maps that are occupied by this TU. I.e. not |
| 3375 | within imports. Only extended when in an interface unit. |
| 3376 | Interval zero corresponds to the forced header linemap(s). This |
| 3377 | is a singleton object. */ |
| 3378 | |
class loc_spans {
public:
  /* An interval of line maps.  The line maps here represent a contiguous
     non-imported range.  */
  struct span {
    loc_range_t ordinary;	/* Ordinary map location range.  */
    loc_range_t macro;		/* Macro map location range.  */
    /* Add to locs to get serialized loc.  */
    location_diff_t ordinary_delta;
    location_diff_t macro_delta;
  };

private:
  vec<span> *spans;		/* Allocated lazily (see init).  */
  bool locs_exhausted_p;	/* Ran out of location numbers.  */

public:
  loc_spans ()
    /* Do not preallocate spans, as that causes
       --enable-detailed-mem-stats problems.  */
    : spans (nullptr), locs_exhausted_p (false)
  {
  }
  ~loc_spans ()
  {
    delete spans;
  }

public:
  /* Access the IXth span.  */
  span &operator[] (unsigned ix)
  {
    return (*spans)[ix];
  }
  unsigned length () const
  {
    return spans->length ();
  }

public:
  bool init_p () const
  {
    return spans != nullptr;
  }
  /* Initializer.  */
  void init (const line_maps *lmaps, const line_map_ordinary *map);

  /* Slightly skewed preprocessed files can cause us to miss an
     initialization in some places.  Fallback initializer.  */
  void maybe_init ()
  {
    if (!init_p ())
      init (lmaps: line_table, map: nullptr);
  }

public:
  enum {
    SPAN_RESERVED = 0,	/* Reserved (fixed) locations.  */
    SPAN_FIRST = 1,	/* LWM of locations to stream  */
    SPAN_MAIN = 2	/* Main file and onwards.  */
  };

public:
  /* First ordinary location of the main-file span.  */
  location_t main_start () const
  {
    return (*spans)[SPAN_MAIN].ordinary.first;
  }

public:
  void open (location_t);
  void close ();

public:
  /* Propagate imported linemaps to us, if needed.  */
  bool maybe_propagate (module_state *import, location_t loc);

public:
  /* Whether we can no longer represent new imported locations.  */
  bool locations_exhausted_p () const
  {
    return locs_exhausted_p;
  }
  /* Note that locations are exhausted, diagnosing it the first
     time.  */
  void report_location_exhaustion (location_t loc)
  {
    if (!locs_exhausted_p)
      {
	/* Just give the notice once.  */
	locs_exhausted_p = true;
	inform (loc, "unable to represent further imported source locations");
      }
  }

public:
  /* Find the span containing an ordinary or macro location.  */
  const span *ordinary (location_t);
  const span *macro (location_t);
};
| 3474 | |
/* The singleton loc_spans object (see the class comment).  */
static loc_spans spans;
| 3476 | |
| 3477 | /* Information about ordinary locations we stream out. */ |
| 3478 | struct ord_loc_info |
| 3479 | { |
| 3480 | const line_map_ordinary *src; // line map we're based on |
| 3481 | line_map_uint_t offset; // offset to this line |
| 3482 | line_map_uint_t span; // number of locs we span |
| 3483 | line_map_uint_t remap; // serialization |
| 3484 | |
| 3485 | static int compare (const void *a_, const void *b_) |
| 3486 | { |
| 3487 | auto *a = static_cast<const ord_loc_info *> (a_); |
| 3488 | auto *b = static_cast<const ord_loc_info *> (b_); |
| 3489 | |
| 3490 | if (a->src != b->src) |
| 3491 | return a->src < b->src ? -1 : +1; |
| 3492 | |
| 3493 | // Ensure no overlap |
| 3494 | gcc_checking_assert (a->offset + a->span <= b->offset |
| 3495 | || b->offset + b->span <= a->offset); |
| 3496 | |
| 3497 | gcc_checking_assert (a->offset != b->offset); |
| 3498 | return a->offset < b->offset ? -1 : +1; |
| 3499 | } |
| 3500 | }; |
| 3501 | struct ord_loc_traits |
| 3502 | { |
| 3503 | typedef ord_loc_info value_type; |
| 3504 | typedef value_type compare_type; |
| 3505 | |
| 3506 | static const bool empty_zero_p = false; |
| 3507 | |
| 3508 | static hashval_t hash (const value_type &v) |
| 3509 | { |
| 3510 | auto h = pointer_hash<const line_map_ordinary>::hash (candidate: v.src); |
| 3511 | return iterative_hash_hashval_t (val: v.offset, val2: h); |
| 3512 | } |
| 3513 | static bool equal (const value_type &v, const compare_type p) |
| 3514 | { |
| 3515 | return v.src == p.src && v.offset == p.offset; |
| 3516 | } |
| 3517 | |
| 3518 | static void mark_empty (value_type &v) |
| 3519 | { |
| 3520 | v.src = nullptr; |
| 3521 | } |
| 3522 | static bool is_empty (value_type &v) |
| 3523 | { |
| 3524 | return !v.src; |
| 3525 | } |
| 3526 | |
| 3527 | static bool is_deleted (value_type &) { return false; } |
| 3528 | static void mark_deleted (value_type &) { gcc_unreachable (); } |
| 3529 | |
| 3530 | static void remove (value_type &) {} |
| 3531 | }; |
/* Table keyed by ord_loc_info, used for noting which ordinary
   locations to stream.  */
static hash_table<ord_loc_traits> *ord_loc_table;
/* Sorted vector, used for writing.  */
static vec<ord_loc_info> *ord_loc_remap;
| 3536 | |
| 3537 | /* Information about macro locations we stream out. */ |
| 3538 | struct macro_loc_info |
| 3539 | { |
| 3540 | const line_map_macro *src; // original expansion |
| 3541 | line_map_uint_t remap; // serialization |
| 3542 | |
| 3543 | static int compare (const void *a_, const void *b_) |
| 3544 | { |
| 3545 | auto *a = static_cast<const macro_loc_info *> (a_); |
| 3546 | auto *b = static_cast<const macro_loc_info *> (b_); |
| 3547 | |
| 3548 | gcc_checking_assert (MAP_START_LOCATION (a->src) |
| 3549 | != MAP_START_LOCATION (b->src)); |
| 3550 | if (MAP_START_LOCATION (map: a->src) < MAP_START_LOCATION (map: b->src)) |
| 3551 | return -1; |
| 3552 | else |
| 3553 | return +1; |
| 3554 | } |
| 3555 | }; |
| 3556 | struct macro_loc_traits |
| 3557 | { |
| 3558 | typedef macro_loc_info value_type; |
| 3559 | typedef const line_map_macro *compare_type; |
| 3560 | |
| 3561 | static const bool empty_zero_p = false; |
| 3562 | |
| 3563 | static hashval_t hash (compare_type p) |
| 3564 | { |
| 3565 | return pointer_hash<const line_map_macro>::hash (candidate: p); |
| 3566 | } |
| 3567 | static hashval_t hash (const value_type &v) |
| 3568 | { |
| 3569 | return hash (p: v.src); |
| 3570 | } |
| 3571 | static bool equal (const value_type &v, const compare_type p) |
| 3572 | { |
| 3573 | return v.src == p; |
| 3574 | } |
| 3575 | |
| 3576 | static void mark_empty (value_type &v) |
| 3577 | { |
| 3578 | v.src = nullptr; |
| 3579 | } |
| 3580 | static bool is_empty (value_type &v) |
| 3581 | { |
| 3582 | return !v.src; |
| 3583 | } |
| 3584 | |
| 3585 | static bool is_deleted (value_type &) { return false; } |
| 3586 | static void mark_deleted (value_type &) { gcc_unreachable (); } |
| 3587 | |
| 3588 | static void remove (value_type &) {} |
| 3589 | }; |
/* Table keyed by line_map_macro, used for noting which macro
   locations to stream.  */
static hash_table<macro_loc_traits> *macro_loc_table;
/* Sorted vector, used for writing.  */
static vec<macro_loc_info> *macro_loc_remap;

/* Indirection to allow bsearching imports by ordinary location.  */
static vec<module_state *> *ool;
| 3597 | |
| 3598 | /********************************************************************/ |
| 3599 | /* Data needed by a module during the process of loading. */ |
| 3600 | struct GTY(()) slurping { |
| 3601 | |
| 3602 | /* Remap import's module numbering to our numbering. Values are |
| 3603 | shifted by 1. Bit0 encodes if the import is direct. */ |
| 3604 | vec<unsigned, va_heap, vl_embed> * |
| 3605 | GTY((skip)) remap; /* Module owner remapping. */ |
| 3606 | |
| 3607 | elf_in *GTY((skip)) from; /* The elf loader. */ |
| 3608 | |
| 3609 | /* This map is only for header imports themselves -- the global |
| 3610 | headers bitmap hold it for the current TU. */ |
| 3611 | bitmap ; /* Transitive set of direct imports, including |
| 3612 | self. Used for macro visibility and |
| 3613 | priority. */ |
| 3614 | |
| 3615 | /* These objects point into the mmapped area, unless we're not doing |
| 3616 | that, or we got frozen or closed. In those cases they point to |
| 3617 | buffers we own. */ |
| 3618 | bytes_in macro_defs; /* Macro definitions. */ |
| 3619 | bytes_in macro_tbl; /* Macro table. */ |
| 3620 | |
| 3621 | /* Location remapping. first->ordinary, second->macro. */ |
| 3622 | range_t GTY((skip)) loc_deltas; |
| 3623 | |
| 3624 | unsigned current; /* Section currently being loaded. */ |
| 3625 | unsigned remaining; /* Number of lazy sections yet to read. */ |
| 3626 | unsigned lru; /* An LRU counter. */ |
| 3627 | |
| 3628 | public: |
| 3629 | slurping (elf_in *); |
| 3630 | ~slurping (); |
| 3631 | |
| 3632 | public: |
| 3633 | /* Close the ELF file, if it's open. */ |
| 3634 | void close () |
| 3635 | { |
| 3636 | if (from) |
| 3637 | { |
| 3638 | from->end (); |
| 3639 | delete from; |
| 3640 | from = NULL; |
| 3641 | } |
| 3642 | } |
| 3643 | |
| 3644 | public: |
| 3645 | void release_macros (); |
| 3646 | |
| 3647 | public: |
| 3648 | void alloc_remap (unsigned size) |
| 3649 | { |
| 3650 | gcc_assert (!remap); |
| 3651 | vec_safe_reserve (v&: remap, nelems: size); |
| 3652 | for (unsigned ix = size; ix--;) |
| 3653 | remap->quick_push (obj: 0); |
| 3654 | } |
| 3655 | unsigned remap_module (unsigned owner) |
| 3656 | { |
| 3657 | if (owner < remap->length ()) |
| 3658 | return (*remap)[owner] >> 1; |
| 3659 | return 0; |
| 3660 | } |
| 3661 | |
| 3662 | public: |
| 3663 | /* GC allocation. But we must explicitly delete it. */ |
| 3664 | static void *operator new (size_t x) |
| 3665 | { |
| 3666 | return ggc_alloc_atomic (s: x); |
| 3667 | } |
| 3668 | static void operator delete (void *p) |
| 3669 | { |
| 3670 | ggc_free (p); |
| 3671 | } |
| 3672 | }; |
| 3673 | |
/* Construct a slurping object reading FROM.  The headers bitmap is
   allocated immediately; the remap vector lazily (see
   alloc_remap).  */

slurping::slurping (elf_in *from)
  : remap (NULL), from (from),
    headers (BITMAP_GGC_ALLOC ()), macro_defs (), macro_tbl (),
    loc_deltas (0, 0),
    current (~0u), remaining (0), lru (0)
{
}
| 3681 | |
/* Free the remap vector and give back the macro buffers before
   closing the ELF file (release_macros uses FROM, so must precede
   close).  */

slurping::~slurping ()
{
  vec_free (v&: remap);
  remap = NULL;
  release_macros ();
  close ();
}
| 3689 | |
| 3690 | void slurping::release_macros () |
| 3691 | { |
| 3692 | if (macro_defs.size) |
| 3693 | elf_in::release (self: from, bytes&: macro_defs); |
| 3694 | if (macro_tbl.size) |
| 3695 | elf_in::release (self: from, bytes&: macro_tbl); |
| 3696 | } |
| 3697 | |
/* Flags for extensions that end up being streamed.  */

enum streamed_extensions {
  SE_OPENMP_SIMD = 1 << 0,
  SE_OPENMP = 1 << 1,
  SE_OPENACC = 1 << 2,
  SE_BITS = 3	/* Number of bits covering the flags above.  */
};
| 3706 | |
/* Counter indices.  These index the per-module counts array streamed
   via write_counts/read_counts.  */
enum module_state_counts
{
  MSC_sec_lwm,
  MSC_sec_hwm,
  MSC_pendings,
  MSC_entities,
  MSC_namespaces,
  MSC_using_directives,
  MSC_bindings,
  MSC_macros,
  MSC_inits,
  MSC_HWM	/* Number of counters.  */
};
| 3721 | |
| 3722 | /********************************************************************/ |
| 3723 | struct module_state_config; |
| 3724 | |
/* Increasing levels of loadedness.  */
enum module_loadedness {
  ML_NONE,		/* Not loaded.  */
  ML_CONFIG,		/* Config loaded.  */
  ML_PREPROCESSOR,	/* Preprocessor loaded.  */
  ML_LANGUAGE,		/* Language loaded.  */
};
| 3732 | |
/* Increasing levels of directness (toplevel) of import.  */
enum module_directness {
  MD_NONE,		/* Not direct.  */
  MD_PARTITION_DIRECT,	/* Direct import of a partition.  */
  MD_DIRECT,		/* Direct import.  */
  MD_PURVIEW_DIRECT,	/* Direct import in purview.  */
};
| 3740 | |
| 3741 | /* State of a particular module. */ |
| 3742 | |
| 3743 | class GTY((chain_next ("%h.parent" ), for_user)) module_state { |
| 3744 | public: |
| 3745 | /* We always import & export ourselves. */ |
| 3746 | bitmap imports; /* Transitive modules we're importing. */ |
| 3747 | bitmap exports; /* Subset of that, that we're exporting. */ |
| 3748 | |
| 3749 | /* For a named module interface A.B, parent is A and name is B. |
| 3750 | For a partition M:P, parent is M and name is P. |
| 3751 | For an implementation unit I, parent is I's interface and name is NULL. |
| 3752 | Otherwise parent is NULL and name will be the flatname. */ |
| 3753 | module_state *parent; |
| 3754 | tree name; |
| 3755 | |
| 3756 | slurping *slurp; /* Data for loading. */ |
| 3757 | |
| 3758 | const char *flatname; /* Flatname of module. */ |
| 3759 | char *filename; /* CMI Filename */ |
| 3760 | |
| 3761 | /* Indices into the entity_ary. */ |
| 3762 | unsigned entity_lwm; |
| 3763 | unsigned entity_num; |
| 3764 | |
| 3765 | /* Location ranges for this module. adhoc-locs are decomposed, so |
| 3766 | don't have a range. */ |
| 3767 | loc_range_t GTY((skip)) ordinary_locs; |
| 3768 | loc_range_t GTY((skip)) macro_locs; // [lwm,num) |
| 3769 | |
| 3770 | /* LOC is first set too the importing location. When initially |
| 3771 | loaded it refers to a module loc whose parent is the importing |
| 3772 | location. */ |
| 3773 | location_t loc; /* Location referring to module itself. */ |
| 3774 | unsigned crc; /* CRC we saw reading it in. */ |
| 3775 | |
| 3776 | unsigned mod; /* Module owner number. */ |
| 3777 | unsigned remap; /* Remapping during writing. */ |
| 3778 | |
| 3779 | unsigned short subst; /* Mangle subst if !0. */ |
| 3780 | |
| 3781 | /* How loaded this module is. */ |
| 3782 | enum module_loadedness loadedness : 2; |
| 3783 | |
| 3784 | bool module_p : 1; /* /The/ module of this TU. */ |
| 3785 | bool : 1; /* Is a header unit. */ |
| 3786 | bool interface_p : 1; /* An interface. */ |
| 3787 | bool partition_p : 1; /* A partition. */ |
| 3788 | |
| 3789 | /* How directly this module is imported. */ |
| 3790 | enum module_directness directness : 2; |
| 3791 | |
| 3792 | bool exported_p : 1; /* directness != MD_NONE && exported. */ |
| 3793 | bool cmi_noted_p : 1; /* We've told the user about the CMI, don't |
| 3794 | do it again */ |
| 3795 | bool active_init_p : 1; /* This module's global initializer needs |
| 3796 | calling. */ |
| 3797 | bool inform_cmi_p : 1; /* Inform of a read/write. */ |
| 3798 | bool visited_p : 1; /* A walk-once flag. */ |
| 3799 | /* Record extensions emitted or permitted. */ |
| 3800 | unsigned extensions : SE_BITS; |
| 3801 | /* 14 bits used, 2 bits remain */ |
| 3802 | |
| 3803 | public: |
| 3804 | module_state (tree name, module_state *, bool); |
| 3805 | ~module_state (); |
| 3806 | |
| 3807 | public: |
| 3808 | void release () |
| 3809 | { |
| 3810 | imports = exports = NULL; |
| 3811 | slurped (); |
| 3812 | } |
| 3813 | void slurped () |
| 3814 | { |
| 3815 | delete slurp; |
| 3816 | slurp = NULL; |
| 3817 | } |
| 3818 | elf_in *from () const |
| 3819 | { |
| 3820 | return slurp->from; |
| 3821 | } |
| 3822 | |
| 3823 | public: |
| 3824 | /* Kind of this module. */ |
| 3825 | bool is_module () const |
| 3826 | { |
| 3827 | return module_p; |
| 3828 | } |
| 3829 | bool () const |
| 3830 | { |
| 3831 | return header_p; |
| 3832 | } |
| 3833 | bool is_interface () const |
| 3834 | { |
| 3835 | return interface_p; |
| 3836 | } |
| 3837 | bool is_partition () const |
| 3838 | { |
| 3839 | return partition_p; |
| 3840 | } |
| 3841 | |
| 3842 | /* How this module is used in the current TU. */ |
| 3843 | bool is_exported () const |
| 3844 | { |
| 3845 | return exported_p; |
| 3846 | } |
| 3847 | bool is_direct () const |
| 3848 | { |
| 3849 | return directness >= MD_DIRECT; |
| 3850 | } |
| 3851 | bool is_purview_direct () const |
| 3852 | { |
| 3853 | return directness == MD_PURVIEW_DIRECT; |
| 3854 | } |
| 3855 | bool is_partition_direct () const |
| 3856 | { |
| 3857 | return directness == MD_PARTITION_DIRECT; |
| 3858 | } |
| 3859 | |
| 3860 | public: |
| 3861 | /* Is this a real module? */ |
| 3862 | bool has_location () const |
| 3863 | { |
| 3864 | return loc != UNKNOWN_LOCATION; |
| 3865 | } |
| 3866 | |
| 3867 | public: |
| 3868 | bool check_circular_import (location_t loc); |
| 3869 | |
| 3870 | public: |
| 3871 | void mangle (bool include_partition); |
| 3872 | |
| 3873 | public: |
| 3874 | void set_import (module_state const *, bool is_export); |
| 3875 | void announce (const char *) const; |
| 3876 | |
| 3877 | public: |
| 3878 | /* Read and write module. */ |
| 3879 | bool write_begin (elf_out *to, cpp_reader *, |
| 3880 | module_state_config &, unsigned &crc); |
| 3881 | void write_end (elf_out *to, cpp_reader *, |
| 3882 | module_state_config &, unsigned &crc); |
| 3883 | bool read_initial (cpp_reader *); |
| 3884 | bool read_preprocessor (bool); |
| 3885 | bool read_language (bool); |
| 3886 | |
| 3887 | public: |
| 3888 | /* Read a section. */ |
| 3889 | bool load_section (unsigned snum, binding_slot *mslot); |
| 3890 | /* Lazily read a section. */ |
| 3891 | bool lazy_load (unsigned index, binding_slot *mslot); |
| 3892 | |
| 3893 | public: |
| 3894 | /* Juggle a limited number of file numbers. */ |
| 3895 | static void freeze_an_elf (); |
| 3896 | bool maybe_defrost (); |
| 3897 | |
| 3898 | public: |
| 3899 | void maybe_completed_reading (); |
| 3900 | bool check_read (bool outermost, bool ok); |
| 3901 | |
| 3902 | private: |
| 3903 | /* The README, for human consumption. */ |
| 3904 | void write_readme (elf_out *to, cpp_reader *, const char *dialect); |
| 3905 | void write_env (elf_out *to); |
| 3906 | |
| 3907 | private: |
| 3908 | /* Import tables. */ |
| 3909 | void write_imports (bytes_out &cfg, bool direct); |
| 3910 | unsigned read_imports (bytes_in &cfg, cpp_reader *, line_maps *maps); |
| 3911 | |
| 3912 | private: |
| 3913 | void write_imports (elf_out *to, unsigned *crc_ptr); |
| 3914 | bool read_imports (cpp_reader *, line_maps *); |
| 3915 | |
| 3916 | private: |
| 3917 | void write_partitions (elf_out *to, unsigned, unsigned *crc_ptr); |
| 3918 | bool read_partitions (unsigned); |
| 3919 | |
| 3920 | private: |
| 3921 | void write_config (elf_out *to, struct module_state_config &, unsigned crc); |
| 3922 | bool read_config (struct module_state_config &, bool = true); |
| 3923 | static void write_counts (elf_out *to, unsigned [MSC_HWM], unsigned *crc_ptr); |
| 3924 | bool read_counts (unsigned *); |
| 3925 | |
| 3926 | public: |
| 3927 | void note_cmi_name (); |
| 3928 | |
| 3929 | private: |
| 3930 | static unsigned write_bindings (elf_out *to, vec<depset *> depsets, |
| 3931 | unsigned *crc_ptr); |
| 3932 | bool read_bindings (unsigned count, unsigned lwm, unsigned hwm); |
| 3933 | |
| 3934 | static void write_namespace (bytes_out &sec, depset *ns_dep); |
| 3935 | tree read_namespace (bytes_in &sec); |
| 3936 | |
| 3937 | void write_namespaces (elf_out *to, vec<depset *> spaces, |
| 3938 | unsigned, unsigned *crc_ptr); |
| 3939 | bool read_namespaces (unsigned); |
| 3940 | |
| 3941 | unsigned write_using_directives (elf_out *to, depset::hash &, |
| 3942 | vec<depset *> spaces, unsigned *crc_ptr); |
| 3943 | bool read_using_directives (unsigned); |
| 3944 | |
| 3945 | void intercluster_seed (trees_out &sec, unsigned index, depset *dep); |
| 3946 | unsigned write_cluster (elf_out *to, depset *depsets[], unsigned size, |
| 3947 | depset::hash &, unsigned *counts, unsigned *crc_ptr); |
| 3948 | bool read_cluster (unsigned snum); |
| 3949 | bool open_slurp (cpp_reader *); |
| 3950 | |
| 3951 | private: |
| 3952 | unsigned write_inits (elf_out *to, depset::hash &, unsigned *crc_ptr); |
| 3953 | bool read_inits (unsigned count); |
| 3954 | |
| 3955 | private: |
| 3956 | unsigned write_pendings (elf_out *to, vec<depset *> depsets, |
| 3957 | depset::hash &, unsigned *crc_ptr); |
| 3958 | bool read_pendings (unsigned count); |
| 3959 | |
| 3960 | private: |
| 3961 | void write_entities (elf_out *to, vec<depset *> depsets, |
| 3962 | unsigned count, unsigned *crc_ptr); |
| 3963 | bool read_entities (unsigned count, unsigned lwm, unsigned hwm); |
| 3964 | |
| 3965 | private: |
| 3966 | void write_init_maps (); |
| 3967 | range_t write_prepare_maps (module_state_config *, bool); |
| 3968 | bool read_prepare_maps (const module_state_config *); |
| 3969 | |
| 3970 | void write_ordinary_maps (elf_out *to, range_t &, |
| 3971 | bool, unsigned *crc_ptr); |
| 3972 | bool read_ordinary_maps (line_map_uint_t, unsigned); |
| 3973 | void write_macro_maps (elf_out *to, range_t &, unsigned *crc_ptr); |
| 3974 | bool read_macro_maps (line_map_uint_t); |
| 3975 | |
| 3976 | void write_diagnostic_classification (elf_out *, diagnostics::context *, |
| 3977 | unsigned *); |
| 3978 | bool read_diagnostic_classification (diagnostics::context *); |
| 3979 | |
| 3980 | private: |
| 3981 | void write_define (bytes_out &, const cpp_macro *); |
| 3982 | cpp_macro *read_define (bytes_in &, cpp_reader *) const; |
| 3983 | vec<cpp_hashnode *> *prepare_macros (cpp_reader *); |
| 3984 | unsigned write_macros (elf_out *to, vec<cpp_hashnode *> *, unsigned *crc_ptr); |
| 3985 | bool read_macros (); |
| 3986 | void install_macros (); |
| 3987 | |
| 3988 | public: |
| 3989 | void import_macros (); |
| 3990 | |
| 3991 | public: |
| 3992 | static void undef_macro (cpp_reader *, location_t, cpp_hashnode *); |
| 3993 | static cpp_macro *deferred_macro (cpp_reader *, location_t, cpp_hashnode *); |
| 3994 | |
| 3995 | public: |
| 3996 | static bool note_location (location_t); |
| 3997 | static void write_location (bytes_out &, location_t); |
| 3998 | location_t read_location (bytes_in &) const; |
| 3999 | |
| 4000 | public: |
| 4001 | void set_flatname (); |
| 4002 | const char *get_flatname () const |
| 4003 | { |
| 4004 | return flatname; |
| 4005 | } |
| 4006 | location_t imported_from () const; |
| 4007 | |
| 4008 | public: |
| 4009 | void set_filename (const Cody::Packet &); |
| 4010 | bool do_import (cpp_reader *, bool outermost); |
| 4011 | bool check_importable (cpp_reader *); |
| 4012 | }; |
| 4013 | |
/* Hash module state by name.  This cannot be a member of
   module_state, because of GTY restrictions.  We never delete from
   the hash table, but ggc_ptr_hash doesn't support that
   simplification.  */

struct module_state_hash : ggc_ptr_hash<module_state> {
  /* Lookup key: the name tree, and the parent pointer with the
     partition flag or'd into its low bit (see hash/equal below).  */
  typedef std::pair<tree,uintptr_t> compare_type; /* {name,parent} */

  static inline hashval_t hash (const value_type m);
  static inline hashval_t hash (const compare_type &n);
  static inline bool equal (const value_type existing,
			    const compare_type &candidate);
};
| 4027 | |
| 4028 | module_state::module_state (tree name, module_state *parent, bool partition) |
| 4029 | : imports (BITMAP_GGC_ALLOC ()), exports (BITMAP_GGC_ALLOC ()), |
| 4030 | parent (parent), name (name), slurp (NULL), |
| 4031 | flatname (NULL), filename (NULL), |
| 4032 | entity_lwm (~0u >> 1), entity_num (0), |
| 4033 | ordinary_locs (0, 0), macro_locs (0, 0), |
| 4034 | loc (UNKNOWN_LOCATION), |
| 4035 | crc (0), mod (MODULE_UNKNOWN), remap (0), subst (0) |
| 4036 | { |
| 4037 | loadedness = ML_NONE; |
| 4038 | |
| 4039 | module_p = header_p = interface_p = partition_p = false; |
| 4040 | |
| 4041 | directness = MD_NONE; |
| 4042 | exported_p = false; |
| 4043 | |
| 4044 | cmi_noted_p = false; |
| 4045 | active_init_p = false; |
| 4046 | |
| 4047 | partition_p = partition; |
| 4048 | |
| 4049 | inform_cmi_p = false; |
| 4050 | visited_p = false; |
| 4051 | |
| 4052 | extensions = 0; |
| 4053 | if (name && TREE_CODE (name) == STRING_CST) |
| 4054 | { |
| 4055 | header_p = true; |
| 4056 | |
| 4057 | const char *string = TREE_STRING_POINTER (name); |
| 4058 | gcc_checking_assert (string[0] == '.' |
| 4059 | ? IS_DIR_SEPARATOR (string[1]) |
| 4060 | : IS_ABSOLUTE_PATH (string)); |
| 4061 | } |
| 4062 | |
| 4063 | gcc_checking_assert (!(parent && header_p)); |
| 4064 | } |
| 4065 | |
/* Drop the import/export bitmaps and any slurping state (see
   release).  */

module_state::~module_state ()
{
  release ();
}
| 4070 | |
| 4071 | /* Hash module state. */ |
| 4072 | static hashval_t |
| 4073 | module_name_hash (const_tree name) |
| 4074 | { |
| 4075 | if (TREE_CODE (name) == STRING_CST) |
| 4076 | return htab_hash_string (TREE_STRING_POINTER (name)); |
| 4077 | else |
| 4078 | return IDENTIFIER_HASH_VALUE (name); |
| 4079 | } |
| 4080 | |
| 4081 | hashval_t |
| 4082 | module_state_hash::hash (const value_type m) |
| 4083 | { |
| 4084 | hashval_t ph = pointer_hash<void>::hash |
| 4085 | (candidate: reinterpret_cast<void *> (reinterpret_cast<uintptr_t> (m->parent) |
| 4086 | | m->is_partition ())); |
| 4087 | hashval_t nh = module_name_hash (name: m->name); |
| 4088 | return iterative_hash_hashval_t (val: ph, val2: nh); |
| 4089 | } |
| 4090 | |
| 4091 | /* Hash a name. */ |
| 4092 | hashval_t |
| 4093 | module_state_hash::hash (const compare_type &c) |
| 4094 | { |
| 4095 | hashval_t ph = pointer_hash<void>::hash (candidate: reinterpret_cast<void *> (c.second)); |
| 4096 | hashval_t nh = module_name_hash (name: c.first); |
| 4097 | |
| 4098 | return iterative_hash_hashval_t (val: ph, val2: nh); |
| 4099 | } |
| 4100 | |
| 4101 | bool |
| 4102 | module_state_hash::equal (const value_type existing, |
| 4103 | const compare_type &candidate) |
| 4104 | { |
| 4105 | uintptr_t ep = (reinterpret_cast<uintptr_t> (existing->parent) |
| 4106 | | existing->is_partition ()); |
| 4107 | if (ep != candidate.second) |
| 4108 | return false; |
| 4109 | |
| 4110 | /* Identifier comparison is by pointer. If the string_csts happen |
| 4111 | to be the same object, then they're equal too. */ |
| 4112 | if (existing->name == candidate.first) |
| 4113 | return true; |
| 4114 | |
| 4115 | /* If neither are string csts, they can't be equal. */ |
| 4116 | if (TREE_CODE (candidate.first) != STRING_CST |
| 4117 | || TREE_CODE (existing->name) != STRING_CST) |
| 4118 | return false; |
| 4119 | |
| 4120 | /* String equality. */ |
| 4121 | if (TREE_STRING_LENGTH (existing->name) |
| 4122 | == TREE_STRING_LENGTH (candidate.first) |
| 4123 | && !memcmp (TREE_STRING_POINTER (existing->name), |
| 4124 | TREE_STRING_POINTER (candidate.first), |
| 4125 | TREE_STRING_LENGTH (existing->name))) |
| 4126 | return true; |
| 4127 | |
| 4128 | return false; |
| 4129 | } |
| 4130 | |
| 4131 | /********************************************************************/ |
| 4132 | /* Global state */ |
| 4133 | |
| 4134 | /* Mapper name. */ |
| 4135 | static const char *module_mapper_name; |
| 4136 | |
| 4137 | /* Deferred import queue (FIFO). */ |
| 4138 | static vec<module_state *, va_heap, vl_embed> *pending_imports; |
| 4139 | |
| 4140 | /* CMI repository path and workspace. */ |
| 4141 | static char *cmi_repo; |
| 4142 | static size_t cmi_repo_length; |
| 4143 | static char *cmi_path; |
| 4144 | static size_t cmi_path_alloc; |
| 4145 | |
| 4146 | /* Count of available and loaded clusters. */ |
| 4147 | static unsigned available_clusters; |
| 4148 | static unsigned loaded_clusters; |
| 4149 | |
| 4150 | /* What the current TU is. */ |
| 4151 | unsigned module_kind; |
| 4152 | |
| 4153 | /* Global trees. */ |
| 4154 | static const std::pair<tree *, unsigned> global_tree_arys[] = |
| 4155 | { |
| 4156 | std::pair<tree *, unsigned> (sizetype_tab, stk_type_kind_last), |
| 4157 | std::pair<tree *, unsigned> (integer_types, itk_none), |
| 4158 | std::pair<tree *, unsigned> (global_trees, TI_MODULE_HWM), |
| 4159 | std::pair<tree *, unsigned> (c_global_trees, CTI_MODULE_HWM), |
| 4160 | std::pair<tree *, unsigned> (cp_global_trees, CPTI_MODULE_HWM), |
| 4161 | std::pair<tree *, unsigned> (NULL, 0) |
| 4162 | }; |
| 4163 | static GTY(()) vec<tree, va_gc> *fixed_trees; |
| 4164 | static unsigned global_crc; |
| 4165 | |
| 4166 | /* Lazy loading can open many files concurrently, there are |
| 4167 | per-process limits on that. We pay attention to the process limit, |
| 4168 | and attempt to increase it when we run out. Otherwise we use an |
| 4169 | LRU scheme to figure out who to flush. Note that if the import |
| 4170 | graph /depth/ exceeds lazy_limit, we'll exceed the limit. */ |
| 4171 | static unsigned lazy_lru; /* LRU counter. */ |
| 4172 | static unsigned lazy_open; /* Number of open modules */ |
| 4173 | static unsigned lazy_limit; /* Current limit of open modules. */ |
| 4174 | static unsigned lazy_hard_limit; /* Hard limit on open modules. */ |
/* Account for source, assembler and dump files & directory searches.
   We don't keep the source files open, so we don't have to account
   for #include depth.  I think dump files are opened and closed per
   pass, but ICBW.  */
| 4179 | #define LAZY_HEADROOM 15 /* File descriptor headroom. */ |
| 4180 | |
| 4181 | /* Vector of module state. Indexed by OWNER. Index 0 is reserved for the |
| 4182 | current TU; imports start at 1. */ |
| 4183 | static GTY(()) vec<module_state *, va_gc> *modules; |
| 4184 | |
| 4185 | /* Get the module state for the current TU's module. */ |
| 4186 | |
| 4187 | static module_state * |
| 4188 | this_module() { |
| 4189 | return (*modules)[0]; |
| 4190 | } |
| 4191 | |
| 4192 | /* Hash of module state, findable by {name, parent}. */ |
| 4193 | static GTY(()) hash_table<module_state_hash> *modules_hash; |
| 4194 | |
| 4195 | /* Map of imported entities. We map DECL_UID to index of entity |
| 4196 | vector. */ |
| 4197 | typedef hash_map<unsigned/*UID*/, unsigned/*index*/, |
| 4198 | simple_hashmap_traits<int_hash<unsigned,0>, unsigned> |
| 4199 | > entity_map_t; |
| 4200 | static entity_map_t *entity_map; |
/* Doesn't need GTYing, because any tree referenced here is also
   findable via the symbol table, the specialization table, or the
   return type of a reachable function.  */
| 4204 | static vec<binding_slot, va_heap, vl_embed> *entity_ary; |
| 4205 | |
| 4206 | /* Members entities of imported classes that are defined in this TU. |
| 4207 | These are where the entity's context is not from the current TU. |
| 4208 | We need to emit the definition (but not the enclosing class). |
| 4209 | |
| 4210 | We could find these by walking ALL the imported classes that we |
| 4211 | could provide a member definition. But that's expensive, |
| 4212 | especially when you consider lazy implicit member declarations, |
| 4213 | which could be ANY imported class. */ |
| 4214 | static GTY(()) vec<tree, va_gc> *class_members; |
| 4215 | |
/* The same problem exists for class template partial
   specializations.  Now that we have constraints, the invariant of
   expecting them in the instantiation table no longer holds.  One of
   the constrained partial specializations will be there, but the
   others not so much.  It's not even an unconstrained partial
   specialization in the table :( so any partial template declaration
   is added to this list too.  */
| 4224 | |
| 4225 | /********************************************************************/ |
| 4226 | |
| 4227 | /* Our module mapper (created lazily). */ |
| 4228 | module_client *mapper; |
| 4229 | |
| 4230 | static module_client *make_mapper (location_t loc, class mkdeps *deps); |
| 4231 | inline module_client *get_mapper (location_t loc, class mkdeps *deps) |
| 4232 | { |
| 4233 | auto *res = mapper; |
| 4234 | if (!res) |
| 4235 | res = make_mapper (loc, deps); |
| 4236 | return res; |
| 4237 | } |
| 4238 | |
| 4239 | /********************************************************************/ |
| 4240 | static tree |
| 4241 | get_clone_target (tree decl) |
| 4242 | { |
| 4243 | tree target; |
| 4244 | |
| 4245 | if (TREE_CODE (decl) == TEMPLATE_DECL) |
| 4246 | { |
| 4247 | tree res_orig = DECL_CLONED_FUNCTION (DECL_TEMPLATE_RESULT (decl)); |
| 4248 | |
| 4249 | target = DECL_TI_TEMPLATE (res_orig); |
| 4250 | } |
| 4251 | else |
| 4252 | target = DECL_CLONED_FUNCTION (decl); |
| 4253 | |
| 4254 | gcc_checking_assert (DECL_MAYBE_IN_CHARGE_CDTOR_P (target)); |
| 4255 | |
| 4256 | return target; |
| 4257 | } |
| 4258 | |
/* Like FOR_EACH_CLONE, but will walk cloned templates.  Iterates
   CLONE over the decls chained after FN, for as long as they are
   clones; does nothing unless FN is a maybe-in-charge cdtor.  */
#define FOR_EVERY_CLONE(CLONE, FN)			\
  if (!DECL_MAYBE_IN_CHARGE_CDTOR_P (FN));		\
  else							\
    for (CLONE = DECL_CHAIN (FN);			\
	 CLONE && DECL_CLONED_FUNCTION_P (CLONE);	\
	 CLONE = DECL_CHAIN (CLONE))
| 4266 | |
/* Return DECL's template_info (or NULL_TREE), setting USE to its
   USE_TEMPLATE value (-1 when there is none).

   It'd be nice if USE_TEMPLATE was a field of template_info
   (a) it'd solve the enum case dealt with below,
   (b) both class templates and decl templates would store this in the
   same place
   (c) this function wouldn't need the by-ref arg, which is annoying.  */

static tree
node_template_info (tree decl, int &use)
{
  tree ti = NULL_TREE;
  int use_tpl = -1;
  if (DECL_IMPLICIT_TYPEDEF_P (decl))
    {
      /* A type: the template info hangs off the type, not the
	 TYPE_DECL.  */
      tree type = TREE_TYPE (decl);

      ti = TYPE_TEMPLATE_INFO (type);
      if (ti)
	{
	  if (TYPE_LANG_SPECIFIC (type))
	    use_tpl = CLASSTYPE_USE_TEMPLATE (type);
	  else
	    {
	      /* An enum, where we don't explicitly encode use_tpl.
		 If the containing context (a type or a function), is
		 an ({im,ex}plicit) instantiation, then this is too.
		 If it's a partial or explicit specialization, then
		 this is not!.  */
	      tree ctx = CP_DECL_CONTEXT (decl);
	      if (TYPE_P (ctx))
		ctx = TYPE_NAME (ctx);
	      /* Recurse into the context; 2 (explicit specialization)
		 maps to 0 for the contained enum.  */
	      node_template_info (decl: ctx, use);
	      use_tpl = use != 2 ? use : 0;
	    }
	}
    }
  else if (DECL_LANG_SPECIFIC (decl)
	   && (VAR_P (decl)
	       || TREE_CODE (decl) == TYPE_DECL
	       || TREE_CODE (decl) == FUNCTION_DECL
	       || TREE_CODE (decl) == FIELD_DECL
	       || TREE_CODE (decl) == CONCEPT_DECL
	       || TREE_CODE (decl) == TEMPLATE_DECL))
    {
      /* A templateable decl: read the info directly off the decl.  */
      use_tpl = DECL_USE_TEMPLATE (decl);
      ti = DECL_TEMPLATE_INFO (decl);
    }

  use = use_tpl;
  return ti;
}
| 4317 | |
| 4318 | /* Find the index in entity_ary for an imported DECL. It should |
| 4319 | always be there, but bugs can cause it to be missing, and that can |
| 4320 | crash the crash reporting -- let's not do that! When streaming |
| 4321 | out we place entities from this module there too -- with negated |
| 4322 | indices. */ |
| 4323 | |
| 4324 | static unsigned |
| 4325 | import_entity_index (tree decl, bool null_ok = false) |
| 4326 | { |
| 4327 | if (unsigned *slot = entity_map->get (DECL_UID (decl))) |
| 4328 | return *slot; |
| 4329 | |
| 4330 | gcc_checking_assert (null_ok); |
| 4331 | return ~(~0u >> 1); |
| 4332 | } |
| 4333 | |
/* Find the module for an imported entity at INDEX in the entity ary.
   There must be one.  An INDEX with (only) the MSB set denotes an
   entity of the current TU.  */

static module_state *
import_entity_module (unsigned index)
{
  if (index > ~(~0u >> 1))
    /* This is an index for an exported entity.  */
    return this_module ();

  /* Binary search the modules array; each import owns the contiguous
     index range [entity_lwm, entity_lwm + entity_num).  */
  /* Do not include the current TU (not an off-by-one error).  */
  unsigned pos = 1;
  unsigned len = modules->length () - pos;
  while (len)
    {
      unsigned half = len / 2;
      module_state *probe = (*modules)[pos + half];
      if (index < probe->entity_lwm)
	len = half;
      else if (index < probe->entity_lwm + probe->entity_num)
	return probe;
      else
	{
	  pos += half + 1;
	  len = len - (half + 1);
	}
    }
  gcc_unreachable ();
}
| 4363 | |
| 4364 | |
| 4365 | /********************************************************************/ |
| 4366 | /* A dumping machinery. */ |
| 4367 | |
class dumper {
 public:
  /* Dump categories, overlaid on -fdump-lang-module sub-flags.  */
  enum {
    LOCATION = TDF_LINENO,  /* -lineno:Source location streaming.  */
    DEPEND = TDF_GRAPH,	/* -graph:Dependency graph construction.  */
    CLUSTER = TDF_BLOCKS,   /* -blocks:Clusters.  */
    TREE = TDF_UID, 	/* -uid:Tree streaming.  */
    MERGE = TDF_ALIAS,	/* -alias:Mergeable Entities.  */
    ELF = TDF_ASMNAME,	/* -asmname:Elf data.  */
    MACRO = TDF_VOPS	/* -vops:Macros.  */
  };

 private:
  /* Heap-allocated implementation, with a trailing embedded stack of
     the modules being dumped (see push/pop).  */
  struct impl {
    typedef vec<module_state *, va_heap, vl_embed> stack_t;

    FILE *stream;	/* Dump stream.  */
    unsigned indent; 	/* Local indentation.  */
    bool bol; 		/* Beginning of line.  */
    stack_t stack;	/* Trailing array of module_state.  */

    bool nested_name (tree);  /* Dump a name following DECL_CONTEXT.  */
  };

 public:
  /* The dumper.  */
  impl *dumps;
  dump_flags_t flags;

 public:
  /* Push/pop module state dumping.  */
  unsigned push (module_state *);
  void pop (unsigned);

 public:
  /* Change local indentation.  */
  void indent ()
  {
    if (dumps)
      dumps->indent++;
  }
  void outdent ()
  {
    if (dumps)
      {
	gcc_checking_assert (dumps->indent);
	dumps->indent--;
      }
  }

 public:
  /* Is dump enabled?  With MASK, only if those category bits are
     set in the dump flags.  */
  bool operator () (int mask = 0)
  {
    if (!dumps || !dumps->stream)
      return false;
    if (mask && !(mask & flags))
      return false;
    return true;
  }
  /* Dump some information.  */
  bool operator () (const char *, ...);
};
| 4431 | |
| 4432 | /* The dumper. */ |
| 4433 | static dumper dump = {.dumps: 0, .flags: dump_flags_t (0)}; |
| 4434 | |
/* Push to dumping M.  Return previous indentation level.  Opens the
   dump stream on the first (outermost) push; grows the trailing
   stack array, reallocating the whole impl, when full.  */

unsigned
dumper::push (module_state *m)
{
  FILE *stream = NULL;
  if (!dumps || !dumps->stack.length ())
    {
      /* Outermost push: open the dump stream; if dumping is not
	 enabled, do nothing at all.  */
      stream = dump_begin (module_dump_id, &flags);
      if (!stream)
	return 0;
    }

  if (!dumps || !dumps->stack.space (nelems: 1))
    {
      /* Create or extend the dump implementor.  */
      unsigned current = dumps ? dumps->stack.length () : 0;
      unsigned count = current ? current * 2 : EXPERIMENT (1, 20);
      size_t alloc = (offsetof (impl, stack)
		      + impl::stack_t::embedded_size (alloc: count));
      dumps = XRESIZEVAR (impl, dumps, alloc);
      dumps->stack.embedded_init (alloc: count, num: current);
    }
  if (stream)
    dumps->stream = stream;

  /* Save the caller's indentation; start the new module at zero.  */
  unsigned n = dumps->indent;
  dumps->indent = 0;
  dumps->bol = true;
  dumps->stack.quick_push (obj: m);
  if (m)
    {
      module_state *from = NULL;

      if (dumps->stack.length () > 1)
	from = dumps->stack[dumps->stack.length () - 2];
      else
	dump ("");
      dump (from ? "Starting module %M (from %M)"
	    : "Starting module %M", m, from);
    }

  return n;
}
| 4479 | |
/* Pop from dumping.  Restore indentation to N (as returned by the
   matching push).  Closes the dump stream on the final pop.  */

void dumper::pop (unsigned n)
{
  if (!dumps)
    return;

  gcc_checking_assert (dump () && !dumps->indent);
  if (module_state *m = dumps->stack[dumps->stack.length () - 1])
    {
      module_state *from = (dumps->stack.length () > 1
			    ? dumps->stack[dumps->stack.length () - 2] : NULL);
      dump (from ? "Finishing module %M (returning to %M)"
	    : "Finishing module %M", m, from);
    }
  dumps->stack.pop ();
  dumps->indent = n;
  if (!dumps->stack.length ())
    {
      /* Outermost pop: close the stream opened by push.  */
      dump_end (module_dump_id, dumps->stream);
      dumps->stream = NULL;
    }
}
| 4503 | |
| 4504 | /* Dump a nested name for arbitrary tree T. Sometimes it won't have a |
| 4505 | name. */ |
| 4506 | |
bool
dumper::impl::nested_name (tree t)
{
  /* Template info of T, if any; printed as <args> at the end.  */
  tree ti = NULL_TREE;
  /* Module of origin: -1 = don't print, -2 = inconsistent data,
     >= 0 = index into the modules array.  */
  int origin = -1;
  tree name = NULL_TREE;

  /* Unwrap a TU-local entity to its name.  */
  if (t && TREE_CODE (t) == TU_LOCAL_ENTITY)
    t = TU_LOCAL_ENTITY_NAME (t);

  /* A binfo stands for its type.  */
  if (t && TREE_CODE (t) == TREE_BINFO)
    t = BINFO_TYPE (t);

  /* A type is printed via its naming decl.  */
  if (t && TYPE_P (t))
    t = TYPE_NAME (t);

  if (t && DECL_P (t))
    {
      /* The global namespace and template parms get no context
	 prefix.  */
      if (t == global_namespace || DECL_TEMPLATE_PARM_P (t))
	;
      else if (tree ctx = DECL_CONTEXT (t))
	/* Recursively print the enclosing context, then a '::'
	   separator.  */
	if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
	    || nested_name (t: ctx))
	  fputs (s: "::" , stream: stream);

      /* If T is the result of a primary template, print the
	 TEMPLATE_DECL itself instead.  */
      int use_tpl;
      ti = node_template_info (decl: t, use&: use_tpl);
      if (ti && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL
	  && (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == t))
	t = TI_TEMPLATE (ti);
      tree not_tmpl = t;
      if (TREE_CODE (t) == TEMPLATE_DECL)
	{
	  fputs (s: "template " , stream: stream);
	  not_tmpl = DECL_TEMPLATE_RESULT (t);
	}

      if (not_tmpl
	  && DECL_P (not_tmpl)
	  && DECL_LANG_SPECIFIC (not_tmpl)
	  && DECL_MODULE_IMPORT_P (not_tmpl))
	{
	  /* We need to be careful here, so as to not explode on
	     inconsistent data -- we're probably debugging, because
	     Something Is Wrong.  */
	  unsigned index = import_entity_index (decl: t, null_ok: true);
	  /* Top bit clear: a regular imported entity.  */
	  if (!(index & ~(~0u >> 1)))
	    origin = import_entity_module (index)->mod;
	  else if (index > ~(~0u >> 1))
	    /* An imported partition member that we're emitting.  */
	    origin = 0;
	  else
	    /* Exactly the top-bit value: flag as unknown.  */
	    origin = -2;
	}

      /* Prefer the source name; fall back to the assembler name for
	 unnamed decls that have one.  */
      name = DECL_NAME (t) ? DECL_NAME (t)
	: HAS_DECL_ASSEMBLER_NAME_P (t) ? DECL_ASSEMBLER_NAME_RAW (t)
	: NULL_TREE;
    }
  else
    /* Not a decl: T itself is the thing to print (identifier,
       constant, ...).  */
    name = t;

  if (name)
    switch (TREE_CODE (name))
      {
      default:
	fputs (s: "#unnamed#" , stream: stream);
	break;

      case IDENTIFIER_NODE:
	fwrite (IDENTIFIER_POINTER (name), size: 1, IDENTIFIER_LENGTH (name), s: stream);
	break;

      case INTEGER_CST:
	print_hex (wi: wi::to_wide (t: name), file: stream);
	break;

      case STRING_CST:
	/* If TREE_TYPE is NULL, this is a raw string.  */
	fwrite (TREE_STRING_POINTER (name), size: 1,
		TREE_STRING_LENGTH (name) - (TREE_TYPE (name) != NULL_TREE),
		s: stream);
	break;
      }
  else
    fputs (s: "#null#" , stream: stream);

  /* Coroutine outlined functions: tag the actor/destroyer variants of
     a ramp function.  */
  if (t && TREE_CODE (t) == FUNCTION_DECL && DECL_COROUTINE_P (t))
    if (tree ramp = DECL_RAMP_FN (t))
      {
	if (DECL_ACTOR_FN (ramp) == t)
	  fputs (s: ".actor" , stream: stream);
	else if (DECL_DESTROY_FN (ramp) == t)
	  fputs (s: ".destroy" , stream: stream);
	else
	  gcc_unreachable ();
      }

  /* Append "@module:index" for imported entities.  */
  if (origin >= 0)
    {
      const module_state *module = (*modules)[origin];
      fprintf (stream: stream, format: "@%s:%d" , !module ? "" : !module->name ? "(unnamed)"
	       : module->get_flatname (), origin);
    }
  else if (origin == -2)
    /* Inconsistent import data (see above).  */
    fprintf (stream: stream, format: "@???" );

  /* Append the innermost template arguments, recursively.  */
  if (ti)
    {
      tree args = INNERMOST_TEMPLATE_ARGS (TI_ARGS (ti));
      fputs (s: "<" , stream: stream);
      if (args)
	for (int ix = 0; ix != TREE_VEC_LENGTH (args); ix++)
	  {
	    if (ix)
	      fputs (s: "," , stream: stream);
	    nested_name (TREE_VEC_ELT (args, ix));
	  }
      fputs (s: ">" , stream: stream);
    }

  return true;
}
| 4630 | |
/* Formatted dumping.  FORMAT begins with '+' do not emit a trailing
   new line.  (Normally it is appended.)
   Escapes:
      %C - tree_code
      %I - identifier
      %K - location_t or line_map_uint_t
      %M - module_state
      %N - name -- DECL_NAME
      %P - context:name pair
      %R - unsigned:unsigned ratio
      %S - symbol -- DECL_ASSEMBLER_NAME
      %U - long unsigned
      %V - version
      --- the following are printf-like, but without its flexibility
      %c - character
      %d - decimal int
      %p - pointer
      %s - string
      %u - unsigned int
      %x - hex int

   We do not implement the printf modifiers.  */
| 4652 | |
bool
dumper::operator () (const char *format, ...)
{
  /* Bail out cheaply when dumping is disabled.  */
  if (!(*this) ())
    return false;

  /* A leading '+' suppresses the trailing newline.  */
  bool no_nl = format[0] == '+';
  format += no_nl;

  /* Emit indentation only at the beginning of a line.  */
  if (dumps->bol)
    {
      /* Module import indent.  */
      if (unsigned depth = dumps->stack.length () - 1)
	{
	  /* Shallow depths print that many '>'s; deeper ones print a
	     numbered marker instead.  */
	  const char *prefix = ">>>>";
	  fprintf (stream: dumps->stream, format: (depth <= strlen (s: prefix)
			   ? &prefix[strlen (s: prefix) - depth]
			   : ">.%d.>"), depth);
	}

      /* Local indent.  */
      if (unsigned indent = dumps->indent)
	{
	  const char *prefix = "      ";
	  fprintf (stream: dumps->stream, format: (indent <= strlen (s: prefix)
			   ? &prefix[strlen (s: prefix) - indent]
			   : " .%d. "), indent);
	}
      dumps->bol = false;
    }

  va_list args;
  va_start (args, format);
  /* Process FORMAT one '%' escape at a time, copying the literal text
     between escapes verbatim.  */
  while (const char *esc = strchr (s: format, c: '%'))
    {
      /* Write the literal text preceding the escape.  */
      fwrite (ptr: format, size: 1, n: (size_t)(esc - format), s: dumps->stream);
      format = ++esc;
      switch (*format++)
	{
	default:
	  /* Unknown escape: programmer error.  */
	  gcc_unreachable ();

	case '%':
	  fputc (c: '%', stream: dumps->stream);
	  break;

	case 'C': /* Code */
	  {
	    tree_code code = (tree_code)va_arg (args, unsigned);
	    fputs (s: get_tree_code_name (code), stream: dumps->stream);
	  }
	  break;

	case 'I': /* Identifier.  */
	  {
	    tree t = va_arg (args, tree);
	    dumps->nested_name (t);
	  }
	  break;

	case 'K': /* location_t, either 32- or 64-bit.  */
	  {
	    /* Widen to the largest possibility for printing.  */
	    unsigned long long u = va_arg (args, location_t);
	    fprintf (stream: dumps->stream, format: "%llu" , u);
	  }
	  break;

	case 'M': /* Module.  */
	  {
	    const char *str = "(none)";
	    if (module_state *m = va_arg (args, module_state *))
	      {
		if (!m->has_location ())
		  str = "(detached)";
		else
		  str = m->get_flatname ();
	      }
	    fputs (s: str, stream: dumps->stream);
	  }
	  break;

	case 'N': /* Name.  */
	  {
	    tree t = va_arg (args, tree);
	    /* Look through overload sets to the first function.  */
	    while (t && TREE_CODE (t) == OVERLOAD)
	      t = OVL_FUNCTION (t);
	    fputc (c: '\'', stream: dumps->stream);
	    dumps->nested_name (t);
	    fputc (c: '\'', stream: dumps->stream);
	  }
	  break;

	case 'P': /* Pair.  */
	  {
	    /* Consumes two tree arguments: a context and a name.  */
	    tree ctx = va_arg (args, tree);
	    tree name = va_arg (args, tree);
	    fputc (c: '\'', stream: dumps->stream);
	    dumps->nested_name (t: ctx);
	    if (ctx && ctx != global_namespace)
	      fputs (s: "::" , stream: dumps->stream);
	    dumps->nested_name (t: name);
	    fputc (c: '\'', stream: dumps->stream);
	  }
	  break;

	case 'R': /* Ratio */
	  {
	    unsigned a = va_arg (args, unsigned);
	    unsigned b = va_arg (args, unsigned);
	    /* Avoid division by zero: B of zero divides by one.  */
	    fprintf (stream: dumps->stream, format: "%.1f" , (float) a / (b + !b));
	  }
	  break;

	case 'S': /* Symbol name */
	  {
	    tree t = va_arg (args, tree);
	    if (t && TYPE_P (t))
	      t = TYPE_NAME (t);
	    /* Print nothing if no assembler name has been set.  */
	    if (t && HAS_DECL_ASSEMBLER_NAME_P (t)
		&& DECL_ASSEMBLER_NAME_SET_P (t))
	      {
		fputc (c: '(', stream: dumps->stream);
		fputs (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t)),
		       stream: dumps->stream);
		fputc (c: ')', stream: dumps->stream);
	      }
	  }
	  break;

	case 'U': /* long unsigned.  */
	  {
	    unsigned long u = va_arg (args, unsigned long);
	    fprintf (stream: dumps->stream, format: "%lu" , u);
	  }
	  break;

	case 'V': /* Version.  */
	  {
	    unsigned v = va_arg (args, unsigned);
	    verstr_t string;

	    version2string (version: v, out&: string);
	    fputs (s: string, stream: dumps->stream);
	  }
	  break;

	case 'c': /* Character.  */
	  {
	    /* char promotes to int through varargs.  */
	    int c = va_arg (args, int);
	    fputc (c: c, stream: dumps->stream);
	  }
	  break;

	case 'd': /* Decimal Int.  */
	  {
	    int d = va_arg (args, int);
	    fprintf (stream: dumps->stream, format: "%d" , d);
	  }
	  break;

	case 'p': /* Pointer.  */
	  {
	    void *p = va_arg (args, void *);
	    fprintf (stream: dumps->stream, format: "%p" , p);
	  }
	  break;

	case 's': /* String.  */
	  {
	    const char *s = va_arg (args, char *);
	    /* %s must never be passed a null pointer.  */
	    gcc_checking_assert (s);
	    fputs (s: s, stream: dumps->stream);
	  }
	  break;

	case 'u': /* Unsigned.  */
	  {
	    unsigned u = va_arg (args, unsigned);
	    fprintf (stream: dumps->stream, format: "%u" , u);
	  }
	  break;

	case 'x': /* Hex.  */
	  {
	    unsigned x = va_arg (args, unsigned);
	    fprintf (stream: dumps->stream, format: "%x" , x);
	  }
	  break;
	}
    }
  /* Emit the tail of the format string after the last escape.  */
  fputs (s: format, stream: dumps->stream);
  va_end (args);
  if (!no_nl)
    {
      dumps->bol = true;
      fputc (c: '\n', stream: dumps->stream);
    }
  return true;
}
| 4852 | |
/* Hasher for the note_defs cache below.  Entries are dropped at GC
   time when their tree is no longer marked.  */

struct note_def_cache_hasher : ggc_cache_ptr_hash<tree_node>
{
  /* Decide whether to keep T in the cache across a collection.
     Returns -1 to keep, 0 to drop.  Only reachable in checking
     builds, since the table is only populated under CHECKING_P.  */
  static int keep_cache_entry (tree t)
  {
    if (!CHECKING_P)
      /* GTY is unfortunately not clever enough to conditionalize
	 this.  */
      gcc_unreachable ();

    if (ggc_marked_p (t))
      return -1;

    /* Dump the drop under a fresh dump scope so indentation state is
       sane during GC.  */
    unsigned n = dump.push (NULL);
    /* This might or might not be an error.  We should note its
       dropping whichever.  */
    dump () && dump ("Dropping %N from note_defs table" , t);
    dump.pop (n);

    return 0;
  }
};
| 4874 | |
| 4875 | /* We should stream each definition at most once. |
| 4876 | This needs to be a cache because there are cases where a definition |
| 4877 | ends up being not retained, and we need to drop those so we don't |
| 4878 | get confused if memory is reallocated. */ |
| 4879 | typedef hash_table<note_def_cache_hasher> note_defs_table_t; |
| 4880 | static GTY((cache)) note_defs_table_t *note_defs; |
| 4881 | |
/* Checking-only bookkeeping: when INSTALLING, record that DECL's
   definition has been read in; otherwise verify our expectations
   about whether it should already be present.  No-op in non-checking
   builds.  */

void
trees_in::assert_definition (tree decl ATTRIBUTE_UNUSED,
			     bool installing ATTRIBUTE_UNUSED)
{
#if CHECKING_P
  tree *slot = note_defs->find_slot (value: decl, insert: installing ? INSERT : NO_INSERT);
  tree not_tmpl = STRIP_TEMPLATE (decl);
  if (installing)
    {
      /* We must be inserting for the first time.  */
      gcc_assert (!*slot);
      *slot = decl;
    }
  else
    /* If this is not the mergeable entity, it should not be in the
       table.  If it is a non-global-module mergeable entity, it
       should be in the table.  Global module entities could have been
       defined textually in the current TU and so might or might not
       be present.  */
    gcc_assert (!is_duplicate (decl)
		? !slot
		: (slot
		   || !DECL_LANG_SPECIFIC (not_tmpl)
		   || !DECL_MODULE_PURVIEW_P (not_tmpl)
		   || (!DECL_MODULE_IMPORT_P (not_tmpl)
		       && header_module_p ())));

  /* Only the TEMPLATE_DECL, never its result, should be recorded.  */
  if (not_tmpl != decl)
    gcc_assert (!note_defs->find_slot (not_tmpl, NO_INSERT));
#endif
}
| 4913 | |
/* Checking-only bookkeeping: record that DECL's definition has been
   streamed out, asserting it is the first (and only) time.  No-op in
   non-checking builds.  */

void
trees_out::assert_definition (tree decl ATTRIBUTE_UNUSED)
{
#if CHECKING_P
  tree *slot = note_defs->find_slot (value: decl, insert: INSERT);
  gcc_assert (!*slot);
  *slot = decl;
  /* For a template, only the TEMPLATE_DECL itself may be recorded,
     never its DECL_TEMPLATE_RESULT.  */
  if (TREE_CODE (decl) == TEMPLATE_DECL)
    gcc_assert (!note_defs->find_slot (DECL_TEMPLATE_RESULT (decl), NO_INSERT));
#endif
}
| 4925 | |
| 4926 | /********************************************************************/ |
/* Whether to emit progress chatter to the user.  Returns false under
   -quiet; otherwise primes the diagnostics machinery (pending newline,
   cleared last-function) so our output interleaves cleanly with any
   diagnostics, and returns true.  */

static bool
noisy_p ()
{
  if (quiet_flag)
    return false;

  pp_needs_newline (pp: global_dc->get_reference_printer ()) = true;
  diagnostic_set_last_function (global_dc,
				(diagnostics::diagnostic_info *) nullptr);

  return true;
}
| 4939 | |
/* Set the cmi repo.  Strip a trailing '/'; a repo of just "." is
   reduced to the empty string (no prefixing).  */
| 4941 | |
static void
set_cmi_repo (const char *r)
{
  /* Discard any previous repo string and cached path buffer.  */
  XDELETEVEC (cmi_repo);
  XDELETEVEC (cmi_path);
  cmi_path_alloc = 0;

  cmi_repo = NULL;
  cmi_repo_length = 0;

  /* NULL or empty disables the repo entirely.  */
  if (!r || !r[0])
    return;

  size_t len = strlen (s: r);
  cmi_repo = XNEWVEC (char, len + 1);
  memcpy (dest: cmi_repo, src: r, n: len + 1);

  /* Drop a single trailing directory separator (but keep a bare "/").  */
  if (len > 1 && IS_DIR_SEPARATOR (cmi_repo[len-1]))
    len--;
  /* "." (possibly after stripping "./") means the current directory:
     reduce to a zero-length repo.  */
  if (len == 1 && cmi_repo[0] == '.')
    len--;
  cmi_repo[len] = 0;
  cmi_repo_length = len;
}
| 4966 | |
| 4967 | /* TO is a repo-relative name. Provide one that we may use from where |
| 4968 | we are. */ |
| 4969 | |
static const char *
maybe_add_cmi_prefix (const char *to, size_t *len_p = NULL)
{
  /* Only compute the length if someone needs it (caller or the
     concatenation below).  */
  size_t len = len_p || cmi_repo_length ? strlen (s: to) : 0;

  /* Absolute paths and an unset repo are used as-is.  */
  if (cmi_repo_length && !IS_ABSOLUTE_PATH (to))
    {
      /* Build "repo/to" in the (reused, grow-only) static cmi_path
	 buffer.  The repo prefix and separator survive between calls,
	 so they are only written when the buffer is (re)allocated.  */
      if (cmi_path_alloc < cmi_repo_length + len + 2)
	{
	  XDELETEVEC (cmi_path);
	  /* Over-allocate (len * 2) to reduce reallocations for
	     subsequent, longer names.  */
	  cmi_path_alloc = cmi_repo_length + len * 2 + 2;
	  cmi_path = XNEWVEC (char, cmi_path_alloc);

	  memcpy (dest: cmi_path, src: cmi_repo, n: cmi_repo_length);
	  cmi_path[cmi_repo_length] = DIR_SEPARATOR;
	}

      /* Append TO (including its NUL) after the separator.  */
      memcpy (dest: &cmi_path[cmi_repo_length + 1], src: to, n: len + 1);
      len += cmi_repo_length + 1;
      to = cmi_path;
    }

  if (len_p)
    *len_p = len;

  return to;
}
| 4997 | |
| 4998 | /* Try and create the directories of PATH. */ |
| 4999 | |
static void
create_dirs (char *path)
{
  char *base = path;
  /* Skip past initial slashes of absolute path.  */
  while (IS_DIR_SEPARATOR (*base))
    base++;

  /* Try and create the missing directories.  At each separator,
     temporarily NUL-terminate PATH so mkdir sees the prefix.  */
  for (; *base; base++)
    if (IS_DIR_SEPARATOR (*base))
      {
	char sep = *base;
	*base = 0;
	int failed = mkdir (path: path, S_IRWXU | S_IRWXG | S_IRWXO);
	dump () && dump ("Mkdir ('%s') errno:=%u" , path, failed ? errno : 0);
	*base = sep;
	/* Stop at the first real failure; EEXIST is expected for
	   prefixes that already exist.  */
	if (failed
	    /* Maybe racing with another creator (of a *different*
	       module).  */
	    && errno != EEXIST)
	  break;
      }
}
| 5024 | |
| 5025 | /* Given a CLASSTYPE_DECL_LIST VALUE get the template friend decl, |
| 5026 | if that's what this is. */ |
| 5027 | |
static tree
friend_from_decl_list (tree frnd)
{
  tree res = frnd;

  /* A TEMPLATE_DECL is already what we want.  */
  if (TREE_CODE (frnd) != TEMPLATE_DECL)
    {
      tree tmpl = NULL_TREE;
      if (TYPE_P (frnd))
	{
	  /* A friend type: use its naming decl, and find its class
	     template if it has one.  */
	  res = TYPE_NAME (frnd);
	  if (CLASS_TYPE_P (frnd)
	      && CLASSTYPE_TEMPLATE_INFO (frnd))
	    tmpl = CLASSTYPE_TI_TEMPLATE (frnd);
	}
      else if (DECL_TEMPLATE_INFO (frnd))
	{
	  /* A friend decl with template info: its TI_TEMPLATE might
	     not actually be a TEMPLATE_DECL -- ignore it then.  */
	  tmpl = DECL_TI_TEMPLATE (frnd);
	  if (TREE_CODE (tmpl) != TEMPLATE_DECL)
	    tmpl = NULL_TREE;
	}

      /* Only step up to the template if RES is its primary result
	 (i.e. the friend is the template itself, not a
	 specialization).  */
      if (tmpl && DECL_TEMPLATE_RESULT (tmpl) == res)
	res = tmpl;
    }

  return res;
}
| 5056 | |
| 5057 | static tree |
| 5058 | find_enum_member (tree ctx, tree name) |
| 5059 | { |
| 5060 | for (tree values = TYPE_VALUES (ctx); |
| 5061 | values; values = TREE_CHAIN (values)) |
| 5062 | if (DECL_NAME (TREE_VALUE (values)) == name) |
| 5063 | return TREE_VALUE (values); |
| 5064 | |
| 5065 | return NULL_TREE; |
| 5066 | } |
| 5067 | |
| 5068 | /********************************************************************/ |
| 5069 | /* Instrumentation gathered writing bytes. */ |
| 5070 | |
/* Report the byte/bit streaming statistics accumulated in the
   lengths[] and spans[] counters.  Index 0/1 track zero/one bit runs,
   2 tracks bit-carrying bytes, 3 tracks whole-byte data.  */

void
bytes_out::instrument ()
{
  dump ("Wrote %u bytes in %u blocks" , lengths[3], spans[3]);
  dump ("Wrote %u bits in %u bytes" , lengths[0] + lengths[1], lengths[2]);
  for (unsigned ix = 0; ix < 2; ix++)
    dump ("  %u %s spans of %R bits" , spans[ix],
	  ix ? "one" : "zero" , lengths[ix], spans[ix]);
  /* Padding is whatever bit capacity was not used by 0/1 spans.  */
  dump ("  %u blocks with %R bits padding" , spans[2],
	lengths[2] * 8 - (lengths[0] + lengths[1]), spans[2]);
}
| 5082 | |
| 5083 | /* Instrumentation gathered writing trees. */ |
/* Instrumentation gathered writing trees.  */
void
trees_out::instrument ()
{
  /* Only emit anything when dumping is enabled.  */
  if (dump (""))
    {
      /* First the underlying byte-level stats, then tree counts.  */
      bytes_out::instrument ();
      dump ("Wrote:" );
      dump ("  %u decl trees" , decl_val_count);
      dump ("  %u other trees" , tree_val_count);
      dump ("  %u back references" , back_ref_count);
      dump ("  %u TU-local entities" , tu_local_count);
      dump ("  %u null trees" , null_count);
    }
}
| 5098 | |
| 5099 | /* Setup and teardown for a tree walk. */ |
| 5100 | |
/* Start a tree walk: mark the fixed trees into the tree map and, when
   actually streaming (not just dep-scanning), begin byte output.  */

void
trees_out::begin ()
{
  /* When streaming, the map must be empty (a dep-scanning walk may
     have left entries behind; mark_trees recreates it then).  */
  gcc_assert (!streaming_p () || !tree_map.elements ());

  mark_trees ();
  if (streaming_p ())
    parent::begin ();
}
| 5110 | |
/* Finish a streaming tree walk: unmark visited trees and write the
   accumulated bytes to section NAME of SINK, updating *CRC_PTR.
   Returns the section number.  */

unsigned
trees_out::end (elf_out *sink, unsigned name, unsigned *crc_ptr)
{
  gcc_checking_assert (streaming_p ());

  unmark_trees ();
  return parent::end (sink, name, crc_ptr);
}
| 5119 | |
/* Finish a non-streaming (dep-scanning) tree walk: just unmark the
   visited trees.  */

void
trees_out::end ()
{
  gcc_assert (!streaming_p ());

  unmark_trees ();
  /* Do not parent::end -- we weren't streaming.  */
}
| 5128 | |
/* Prepare the tree map for a new walk: reset it if previously used,
   then seed it with the fixed (preloaded) trees and reset the
   back-reference counter.  */

void
trees_out::mark_trees ()
{
  if (size_t size = tree_map.elements ())
    {
      /* This isn't our first rodeo, destroy and recreate the
	 tree_map.  I'm a bad bad man.  Use the previous size as a
	 guess for the next one (so not all bad).  */
      tree_map.~ptr_int_hash_map ();
      new (&tree_map) ptr_int_hash_map (size);
    }

  /* Install the fixed trees, with +ve references.  */
  unsigned limit = fixed_trees->length ();
  for (unsigned ix = 0; ix != limit; ix++)
    {
      tree val = (*fixed_trees)[ix];
      bool existed = tree_map.put (k: val, v: ix + tag_fixed);
      /* Fixed trees must be distinct and not already visited.  */
      gcc_checking_assert (!TREE_VISITED (val) && !existed);
      /* TREE_VISITED flags map membership for fast lookup.  */
      TREE_VISITED (val) = true;
    }

  /* Back references count down from zero (see insert).  */
  ref_num = 0;
}
| 5153 | |
| 5154 | /* Unmark the trees we encountered */ |
| 5155 | |
void
trees_out::unmark_trees ()
{
  /* Clear TREE_VISITED on every tree we put in the map, verifying the
     walk left each entry in a terminal state.  */
  ptr_int_hash_map::iterator end (tree_map.end ());
  for (ptr_int_hash_map::iterator iter (tree_map.begin ()); iter != end; ++iter)
    {
      tree node = reinterpret_cast<tree> ((*iter).first);
      int ref = (*iter).second;
      /* We should have visited the node, and converted its mergeable
	 reference to a regular reference.  */
      gcc_checking_assert (TREE_VISITED (node)
			   && (ref <= tag_backref || ref >= tag_fixed));
      TREE_VISITED (node) = false;
    }
}
| 5171 | |
| 5172 | /* Mark DECL for by-value walking. We do this by inserting it into |
| 5173 | the tree map with a reference of zero. May be called multiple |
| 5174 | times on the same node. */ |
| 5175 | |
void
trees_out::mark_by_value (tree decl)
{
  /* Only decls (and the decl-like enum constants and binfos) are
     streamed by value.  */
  gcc_checking_assert (DECL_P (decl)
		       /* Enum consts are INTEGER_CSTS.  */
		       || TREE_CODE (decl) == INTEGER_CST
		       || TREE_CODE (decl) == TREE_BINFO);

  if (TREE_VISITED (decl))
    /* Must already be forced or fixed.  */
    gcc_checking_assert (*tree_map.get (decl) >= tag_value);
  else
    {
      /* First time: record with the by-value sentinel tag.  */
      bool existed = tree_map.put (k: decl, v: tag_value);
      gcc_checking_assert (!existed);
      TREE_VISITED (decl) = true;
    }
}
| 5194 | |
/* Return the tag previously recorded for T, which must already be in
   the tree map (TREE_VISITED set).  */

int
trees_out::get_tag (tree t)
{
  gcc_checking_assert (TREE_VISITED (t));
  return *tree_map.get (k: t);
}
| 5201 | |
| 5202 | /* Insert T into the map, return its tag number. */ |
| 5203 | |
/* Insert T into the map, return its tag number.  */

int
trees_out::insert (tree t, walk_kind walk)
{
  /* A normal walk must not revisit; a by-value walk may overwrite the
     tag_value sentinel placed by mark_by_value.  */
  gcc_checking_assert (walk != WK_normal || !TREE_VISITED (t));
  /* Back-reference tags are negative, allocated downwards.  */
  int tag = --ref_num;
  bool existed;
  int &slot = tree_map.get_or_insert (k: t, existed: &existed);
  gcc_checking_assert (TREE_VISITED (t) == existed
		       && (!existed
			   || (walk == WK_value && slot == tag_value)));
  TREE_VISITED (t) = true;
  slot = tag;

  return tag;
}
| 5219 | |
| 5220 | /* Insert T into the backreference array. Return its back reference |
| 5221 | number. */ |
| 5222 | |
int
trees_in::insert (tree t)
{
  /* A null tree is only acceptable if we've already detected stream
     corruption.  */
  gcc_checking_assert (t || get_overrun ());
  back_refs.safe_push (obj: t);
  /* Back references are negative: -1 is the first pushed tree.  */
  return -(int)back_refs.length ();
}
| 5230 | |
| 5231 | /* A chained set of decls. */ |
| 5232 | |
| 5233 | void |
| 5234 | trees_out::chained_decls (tree decls) |
| 5235 | { |
| 5236 | for (; decls; decls = DECL_CHAIN (decls)) |
| 5237 | tree_node (decls); |
| 5238 | tree_node (NULL_TREE); |
| 5239 | } |
| 5240 | |
/* Read a null-terminated sequence of decls, rebuilding the DECL_CHAIN
   links.  Returns the head of the chain, or NULL_TREE.  Sets overrun
   on malformed input.  */

tree
trees_in::chained_decls ()
{
  tree decls = NULL_TREE;
  /* CHAIN points at the link to fill in next.  */
  for (tree *chain = &decls;;)
    if (tree decl = tree_node ())
      {
	/* Each element must be a decl not already on some chain.  */
	if (!DECL_P (decl) || DECL_CHAIN (decl))
	  {
	    set_overrun ();
	    break;
	  }
	*chain = decl;
	chain = &DECL_CHAIN (decl);
      }
    else
      /* Null tree terminates the chain.  */
      break;

  return decls;
}
| 5261 | |
| 5262 | /* A vector of decls following DECL_CHAIN. */ |
| 5263 | |
void
trees_out::vec_chained_decls (tree decls)
{
  if (streaming_p ())
    {
      /* Emit the chain length up front so the reader can preallocate
	 (it reads back into a vector, not a chain).  */
      unsigned len = 0;

      for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
	len++;
      u (v: len);
    }

  for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
    {
      if (DECL_IMPLICIT_TYPEDEF_P (decl)
	  && TYPE_NAME (TREE_TYPE (decl)) != decl)
	/* An anonynmous struct with a typedef name.  An odd thing to
	   write.  */
	tree_node (NULL_TREE);
      else
	tree_node (decl);
    }
}
| 5287 | |
/* Read a counted sequence of decls (written by vec_chained_decls)
   into a heap vector.  Returns NULL for an empty sequence or on
   error.  */

vec<tree, va_heap> *
trees_in::vec_chained_decls ()
{
  vec<tree, va_heap> *v = NULL;

  if (unsigned len = u ())
    {
      vec_alloc (v, nelems: len);

      for (unsigned ix = 0; ix < len; ix++)
	{
	  tree decl = tree_node ();
	  /* Null entries are permitted (the writer elides anonymous
	     structs with typedef names), but non-decls are not.  */
	  if (decl && !DECL_P (decl))
	    {
	      set_overrun ();
	      break;
	    }
	  v->quick_push (obj: decl);
	}

      /* On any error, free the partial vector.  */
      if (get_overrun ())
	{
	  vec_free (v);
	  v = NULL;
	}
    }

  return v;
}
| 5317 | |
| 5318 | /* A vector of trees. */ |
| 5319 | |
| 5320 | void |
| 5321 | trees_out::tree_vec (vec<tree, va_gc> *v) |
| 5322 | { |
| 5323 | unsigned len = vec_safe_length (v); |
| 5324 | if (streaming_p ()) |
| 5325 | u (v: len); |
| 5326 | for (unsigned ix = 0; ix != len; ix++) |
| 5327 | tree_node ((*v)[ix]); |
| 5328 | } |
| 5329 | |
/* Read a counted sequence of trees into a GC vector.  Returns NULL
   for an empty sequence.  */

vec<tree, va_gc> *
trees_in::tree_vec ()
{
  vec<tree, va_gc> *v = NULL;
  if (unsigned len = u ())
    {
      vec_alloc (v, nelems: len);
      for (unsigned ix = 0; ix != len; ix++)
	v->quick_push (obj: tree_node ());
    }
  return v;
}
| 5342 | |
| 5343 | /* A vector of tree pairs. */ |
| 5344 | |
| 5345 | void |
| 5346 | trees_out::tree_pair_vec (vec<tree_pair_s, va_gc> *v) |
| 5347 | { |
| 5348 | unsigned len = vec_safe_length (v); |
| 5349 | if (streaming_p ()) |
| 5350 | u (v: len); |
| 5351 | if (len) |
| 5352 | for (unsigned ix = 0; ix != len; ix++) |
| 5353 | { |
| 5354 | tree_pair_s const &s = (*v)[ix]; |
| 5355 | tree_node (s.purpose); |
| 5356 | tree_node (s.value); |
| 5357 | } |
| 5358 | } |
| 5359 | |
/* Read a counted sequence of (purpose, value) tree pairs into a GC
   vector.  Returns NULL for an empty sequence.  */

vec<tree_pair_s, va_gc> *
trees_in::tree_pair_vec ()
{
  vec<tree_pair_s, va_gc> *v = NULL;
  if (unsigned len = u ())
    {
      vec_alloc (v, nelems: len);
      for (unsigned ix = 0; ix != len; ix++)
	{
	  tree_pair_s s;
	  /* Read in the same order the writer emitted: purpose then
	     value.  */
	  s.purpose = tree_node ();
	  s.value = tree_node ();
	  v->quick_push (obj: s);
	}
    }
  return v;
}
| 5377 | |
| 5378 | void |
| 5379 | trees_out::tree_list (tree list, bool has_purpose) |
| 5380 | { |
| 5381 | for (; list; list = TREE_CHAIN (list)) |
| 5382 | { |
| 5383 | gcc_checking_assert (TREE_VALUE (list)); |
| 5384 | tree_node (TREE_VALUE (list)); |
| 5385 | if (has_purpose) |
| 5386 | tree_node (TREE_PURPOSE (list)); |
| 5387 | } |
| 5388 | tree_node (NULL_TREE); |
| 5389 | } |
| 5390 | |
/* Read a null-terminated TREE_LIST (written by trees_out::tree_list).
   When HAS_PURPOSE, each value is followed by its purpose.  Returns
   the rebuilt list, or NULL_TREE if empty.  */

tree
trees_in::tree_list (bool has_purpose)
{
  tree res = NULL_TREE;

  /* A null value terminates; otherwise append a fresh node.  */
  for (tree *chain = &res; tree value = tree_node ();
       chain = &TREE_CHAIN (*chain))
    {
      tree purpose = has_purpose ? tree_node () : NULL_TREE;
      *chain = build_tree_list (purpose, value);
    }

  return res;
}
| 5405 | |
| 5406 | #define CASE_OMP_SIMD_CODE \ |
| 5407 | case OMP_SIMD: \ |
| 5408 | case OMP_STRUCTURED_BLOCK: \ |
| 5409 | case OMP_LOOP: \ |
| 5410 | case OMP_ORDERED: \ |
| 5411 | case OMP_TILE: \ |
| 5412 | case OMP_UNROLL |
| 5413 | #define CASE_OMP_CODE \ |
| 5414 | case OMP_PARALLEL: \ |
| 5415 | case OMP_TASK: \ |
| 5416 | case OMP_FOR: \ |
| 5417 | case OMP_DISTRIBUTE: \ |
| 5418 | case OMP_TASKLOOP: \ |
| 5419 | case OMP_TEAMS: \ |
| 5420 | case OMP_TARGET_DATA: \ |
| 5421 | case OMP_TARGET: \ |
| 5422 | case OMP_SECTIONS: \ |
| 5423 | case OMP_CRITICAL: \ |
| 5424 | case OMP_SINGLE: \ |
| 5425 | case OMP_SCOPE: \ |
| 5426 | case OMP_TASKGROUP: \ |
| 5427 | case OMP_MASKED: \ |
| 5428 | case OMP_DISPATCH: \ |
| 5429 | case OMP_INTEROP: \ |
| 5430 | case OMP_MASTER: \ |
| 5431 | case OMP_TARGET_UPDATE: \ |
| 5432 | case OMP_TARGET_ENTER_DATA: \ |
| 5433 | case OMP_TARGET_EXIT_DATA: \ |
| 5434 | case OMP_METADIRECTIVE: \ |
| 5435 | case OMP_ATOMIC: \ |
| 5436 | case OMP_ATOMIC_READ: \ |
| 5437 | case OMP_ATOMIC_CAPTURE_OLD: \ |
| 5438 | case OMP_ATOMIC_CAPTURE_NEW |
| 5439 | #define CASE_OACC_CODE \ |
| 5440 | case OACC_PARALLEL: \ |
| 5441 | case OACC_KERNELS: \ |
| 5442 | case OACC_SERIAL: \ |
| 5443 | case OACC_DATA: \ |
| 5444 | case OACC_HOST_DATA: \ |
| 5445 | case OACC_LOOP: \ |
| 5446 | case OACC_CACHE: \ |
| 5447 | case OACC_DECLARE: \ |
| 5448 | case OACC_ENTER_DATA: \ |
| 5449 | case OACC_EXIT_DATA: \ |
| 5450 | case OACC_UPDATE |
| 5451 | |
| 5452 | /* Start tree write. Write information to allocate the receiving |
| 5453 | node. */ |
| 5454 | |
void
trees_out::start (tree t, bool code_streamed)
{
  if (TYPE_P (t))
    {
      /* Only main variants of these aggregate/parm types are streamed
	 by value; other types are recreated from their components.  */
      enum tree_code code = TREE_CODE (t);
      gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
      /* All these types are TYPE_NON_COMMON.  */
      gcc_checking_assert (code == RECORD_TYPE
			   || code == UNION_TYPE
			   || code == ENUMERAL_TYPE
			   || code == TEMPLATE_TYPE_PARM
			   || code == TEMPLATE_TEMPLATE_PARM
			   || code == BOUND_TEMPLATE_TEMPLATE_PARM);
    }

  /* The code itself, unless the caller already wrote it.  */
  if (!code_streamed)
    u (TREE_CODE (t));

  /* Emit whatever extra sizing data the reader's allocator needs for
     this code.  */
  switch (TREE_CODE (t))
    {
    default:
      /* Variable-length expressions need their operand count.  */
      if (VL_EXP_CLASS_P (t))
	u (VL_EXP_OPERAND_LENGTH (t));
      break;

    case INTEGER_CST:
      u (TREE_INT_CST_NUNITS (t));
      u (TREE_INT_CST_EXT_NUNITS (t));
      break;

    case OMP_CLAUSE:
      u (OMP_CLAUSE_CODE (t));
      break;

    /* OpenMP/OpenACC constructs taint the CMI with the corresponding
       streaming extension, checked at read time.  */
    CASE_OMP_SIMD_CODE:
      state->extensions |= SE_OPENMP_SIMD;
      break;

    CASE_OMP_CODE:
      state->extensions |= SE_OPENMP;
      break;

    CASE_OACC_CODE:
      state->extensions |= SE_OPENACC;
      break;

    case STRING_CST:
      str (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
      break;

    case RAW_DATA_CST:
      if (RAW_DATA_OWNER (t) == NULL_TREE)
	{
	  /* Stream RAW_DATA_CST with no owner (i.e. data pointing
	     into libcpp buffers) as something we can stream in as
	     STRING_CST which owns the data.  */
	  u (v: 0);
	  /* Can't use str (RAW_DATA_POINTER (t), RAW_DATA_LENGTH (t));
	     here as there isn't a null termination after it.  */
	  z (RAW_DATA_LENGTH (t));
	  if (RAW_DATA_LENGTH (t))
	    if (void *ptr = buf (RAW_DATA_LENGTH (t) + 1))
	      {
		/* Copy the bytes and add the NUL ourselves.  */
		memcpy (dest: ptr, RAW_DATA_POINTER (t), RAW_DATA_LENGTH (t));
		((char *) ptr)[RAW_DATA_LENGTH (t)] = '\0';
	      }
	}
      else
	{
	  /* Owned data: a non-zero length distinguishes this case
	     from the unowned one above.  */
	  gcc_assert (RAW_DATA_LENGTH (t));
	  u (RAW_DATA_LENGTH (t));
	}
      break;

    case VECTOR_CST:
      u (VECTOR_CST_LOG2_NPATTERNS (t));
      u (VECTOR_CST_NELTS_PER_PATTERN (t));
      break;

    case TREE_BINFO:
      u (BINFO_N_BASE_BINFOS (t));
      break;

    case TREE_VEC:
      u (TREE_VEC_LENGTH (t));
      break;

    case FIXED_CST:
      gcc_unreachable (); /* Not supported in C++.  */
      break;

    case IDENTIFIER_NODE:
    case SSA_NAME:
    case TARGET_MEM_REF:
    case TRANSLATION_UNIT_DECL:
      /* We shouldn't meet these.  */
      gcc_unreachable ();
      break;
    }
}
| 5556 | |
| 5557 | /* Start tree read. Allocate the receiving node. */ |
| 5558 | |
| 5559 | tree |
| 5560 | trees_in::start (unsigned code) |
| 5561 | { |
| 5562 | tree t = NULL_TREE; |
| 5563 | |
| 5564 | if (!code) |
| 5565 | code = u (); |
| 5566 | |
| 5567 | switch (code) |
| 5568 | { |
| 5569 | default: |
| 5570 | if (code >= MAX_TREE_CODES) |
| 5571 | { |
| 5572 | fail: |
| 5573 | set_overrun (); |
| 5574 | return NULL_TREE; |
| 5575 | } |
| 5576 | else if (TREE_CODE_CLASS (code) == tcc_vl_exp) |
| 5577 | { |
| 5578 | unsigned ops = u (); |
| 5579 | t = build_vl_exp (tree_code (code), ops); |
| 5580 | } |
| 5581 | else |
| 5582 | t = make_node (tree_code (code)); |
| 5583 | break; |
| 5584 | |
| 5585 | case INTEGER_CST: |
| 5586 | { |
| 5587 | unsigned n = u (); |
| 5588 | unsigned e = u (); |
| 5589 | t = make_int_cst (n, e); |
| 5590 | } |
| 5591 | break; |
| 5592 | |
| 5593 | case OMP_CLAUSE: |
| 5594 | t = build_omp_clause (UNKNOWN_LOCATION, omp_clause_code (u ())); |
| 5595 | break; |
| 5596 | |
| 5597 | CASE_OMP_SIMD_CODE: |
| 5598 | if (!(state->extensions & SE_OPENMP_SIMD)) |
| 5599 | goto fail; |
| 5600 | t = make_node (tree_code (code)); |
| 5601 | break; |
| 5602 | |
| 5603 | CASE_OMP_CODE: |
| 5604 | if (!(state->extensions & SE_OPENMP)) |
| 5605 | goto fail; |
| 5606 | t = make_node (tree_code (code)); |
| 5607 | break; |
| 5608 | |
| 5609 | CASE_OACC_CODE: |
| 5610 | if (!(state->extensions & SE_OPENACC)) |
| 5611 | goto fail; |
| 5612 | t = make_node (tree_code (code)); |
| 5613 | break; |
| 5614 | |
| 5615 | case STRING_CST: |
| 5616 | { |
| 5617 | size_t l; |
| 5618 | const char *chars = str (len_p: &l); |
| 5619 | t = build_string (l, chars); |
| 5620 | } |
| 5621 | break; |
| 5622 | |
| 5623 | case RAW_DATA_CST: |
| 5624 | { |
| 5625 | size_t l = u (); |
| 5626 | if (l == 0) |
| 5627 | { |
| 5628 | /* Stream in RAW_DATA_CST with no owner as STRING_CST |
| 5629 | which owns the data. */ |
| 5630 | const char *chars = str (len_p: &l); |
| 5631 | t = build_string (l, chars); |
| 5632 | } |
| 5633 | else |
| 5634 | { |
| 5635 | t = make_node (RAW_DATA_CST); |
| 5636 | RAW_DATA_LENGTH (t) = l; |
| 5637 | } |
| 5638 | } |
| 5639 | break; |
| 5640 | |
| 5641 | case VECTOR_CST: |
| 5642 | { |
| 5643 | unsigned log2_npats = u (); |
| 5644 | unsigned elts_per = u (); |
| 5645 | t = make_vector (log2_npats, elts_per); |
| 5646 | } |
| 5647 | break; |
| 5648 | |
| 5649 | case TREE_BINFO: |
| 5650 | t = make_tree_binfo (u ()); |
| 5651 | break; |
| 5652 | |
| 5653 | case TREE_VEC: |
| 5654 | t = make_tree_vec (u ()); |
| 5655 | break; |
| 5656 | |
| 5657 | case FIXED_CST: |
| 5658 | case IDENTIFIER_NODE: |
| 5659 | case SSA_NAME: |
| 5660 | case TARGET_MEM_REF: |
| 5661 | case TRANSLATION_UNIT_DECL: |
| 5662 | goto fail; |
| 5663 | } |
| 5664 | |
| 5665 | return t; |
| 5666 | } |
| 5667 | |
/* The kinds of interface an importer could have for a decl.  */

enum class importer_interface {
  unknown,	/* The definition may or may not need to be emitted.  */
  external,	/* The definition can always be found in another TU.  */
  internal,	/* The definition should be emitted in the importer's TU.  */
  always_emit,	/* The definition must be emitted in the importer's TU,
		   regardless of whether it is used or not.  */
};
| 5677 | |
| 5678 | /* Returns what kind of interface an importer will have of DECL. */ |
| 5679 | |
| 5680 | static importer_interface |
| 5681 | get_importer_interface (tree decl) |
| 5682 | { |
| 5683 | /* Internal linkage entities must be emitted in each importer if |
| 5684 | there is a definition available. */ |
| 5685 | if (!TREE_PUBLIC (decl)) |
| 5686 | return importer_interface::internal; |
| 5687 | |
| 5688 | /* Other entities that aren't vague linkage are either not definitions |
| 5689 | or will be publicly emitted in this TU, so importers can just refer |
| 5690 | to an external definition. */ |
| 5691 | if (!vague_linkage_p (decl)) |
| 5692 | return importer_interface::external; |
| 5693 | |
| 5694 | /* For explicit instantiations, importers can always rely on there |
| 5695 | being a definition in another TU, unless this is a definition |
| 5696 | in a header module: in which case the importer will always need |
| 5697 | to emit it. */ |
| 5698 | if (DECL_LANG_SPECIFIC (decl) |
| 5699 | && DECL_EXPLICIT_INSTANTIATION (decl)) |
| 5700 | return (header_module_p () && !DECL_EXTERNAL (decl) |
| 5701 | ? importer_interface::always_emit |
| 5702 | : importer_interface::external); |
| 5703 | |
| 5704 | /* A gnu_inline function is never emitted in any TU. */ |
| 5705 | if (TREE_CODE (decl) == FUNCTION_DECL |
| 5706 | && DECL_DECLARED_INLINE_P (decl) |
| 5707 | && lookup_attribute (attr_name: "gnu_inline" , DECL_ATTRIBUTES (decl))) |
| 5708 | return importer_interface::external; |
| 5709 | |
| 5710 | /* Everything else has vague linkage. */ |
| 5711 | return importer_interface::unknown; |
| 5712 | } |
| 5713 | |
| 5714 | /* The structure streamers access the raw fields, because the |
| 5715 | alternative, of using the accessor macros can require using |
| 5716 | different accessors for the same underlying field, depending on the |
| 5717 | tree code. That's both confusing and annoying. */ |
| 5718 | |
| 5719 | /* Read & write the core boolean flags. */ |
| 5720 | |
| 5721 | void |
| 5722 | trees_out::core_bools (tree t, bits_out& bits) |
| 5723 | { |
| 5724 | #define WB(X) (bits.b (X)) |
| 5725 | /* Stream X if COND holds, and if !COND stream a dummy value so that the |
| 5726 | overall number of bits streamed is independent of the runtime value |
| 5727 | of COND, which allows the compiler to better optimize this function. */ |
| 5728 | #define WB_IF(COND, X) WB ((COND) ? (X) : false) |
| 5729 | tree_code code = TREE_CODE (t); |
| 5730 | |
| 5731 | WB (t->base.side_effects_flag); |
| 5732 | WB (t->base.constant_flag); |
| 5733 | WB (t->base.addressable_flag); |
| 5734 | WB (t->base.volatile_flag); |
| 5735 | WB (t->base.readonly_flag); |
| 5736 | /* base.asm_written_flag is a property of the current TU's use of |
| 5737 | this decl. */ |
| 5738 | WB (t->base.nowarning_flag); |
| 5739 | /* base.visited read as zero (it's set for writer, because that's |
| 5740 | how we mark nodes). */ |
| 5741 | /* base.used_flag is not streamed. Readers may set TREE_USED of |
| 5742 | decls they use. */ |
| 5743 | WB (t->base.nothrow_flag); |
| 5744 | WB (t->base.static_flag); |
| 5745 | /* This is TYPE_CACHED_VALUES_P for types. */ |
| 5746 | WB_IF (TREE_CODE_CLASS (code) != tcc_type, t->base.public_flag); |
| 5747 | WB (t->base.private_flag); |
| 5748 | WB (t->base.protected_flag); |
| 5749 | WB (t->base.deprecated_flag); |
| 5750 | WB (t->base.default_def_flag); |
| 5751 | |
| 5752 | switch (code) |
| 5753 | { |
| 5754 | case CALL_EXPR: |
| 5755 | case INTEGER_CST: |
| 5756 | case SSA_NAME: |
| 5757 | case TARGET_MEM_REF: |
| 5758 | case TREE_VEC: |
| 5759 | /* These use different base.u fields. */ |
| 5760 | return; |
| 5761 | |
| 5762 | default: |
| 5763 | WB (t->base.u.bits.lang_flag_0); |
| 5764 | bool flag_1 = t->base.u.bits.lang_flag_1; |
| 5765 | if (!flag_1) |
| 5766 | ; |
| 5767 | else if (code == TEMPLATE_INFO) |
| 5768 | /* This is TI_PENDING_TEMPLATE_FLAG, not relevant to reader. */ |
| 5769 | flag_1 = false; |
| 5770 | else if (code == VAR_DECL) |
| 5771 | { |
| 5772 | /* This is DECL_INITIALIZED_P. */ |
| 5773 | if (TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL) |
| 5774 | /* We'll set this when reading the definition. */ |
| 5775 | flag_1 = false; |
| 5776 | } |
| 5777 | WB (flag_1); |
| 5778 | WB (t->base.u.bits.lang_flag_2); |
| 5779 | WB (t->base.u.bits.lang_flag_3); |
| 5780 | WB (t->base.u.bits.lang_flag_4); |
| 5781 | WB (t->base.u.bits.lang_flag_5); |
| 5782 | WB (t->base.u.bits.lang_flag_6); |
| 5783 | WB (t->base.u.bits.saturating_flag); |
| 5784 | WB (t->base.u.bits.unsigned_flag); |
| 5785 | WB (t->base.u.bits.packed_flag); |
| 5786 | WB (t->base.u.bits.user_align); |
| 5787 | WB (t->base.u.bits.nameless_flag); |
| 5788 | WB (t->base.u.bits.atomic_flag); |
| 5789 | WB (t->base.u.bits.unavailable_flag); |
| 5790 | break; |
| 5791 | } |
| 5792 | |
| 5793 | if (TREE_CODE_CLASS (code) == tcc_type) |
| 5794 | { |
| 5795 | WB (t->type_common.no_force_blk_flag); |
| 5796 | WB (t->type_common.needs_constructing_flag); |
| 5797 | WB (t->type_common.transparent_aggr_flag); |
| 5798 | WB (t->type_common.restrict_flag); |
| 5799 | WB (t->type_common.string_flag); |
| 5800 | WB (t->type_common.lang_flag_0); |
| 5801 | WB (t->type_common.lang_flag_1); |
| 5802 | WB (t->type_common.lang_flag_2); |
| 5803 | WB (t->type_common.lang_flag_3); |
| 5804 | WB (t->type_common.lang_flag_4); |
| 5805 | WB (t->type_common.lang_flag_5); |
| 5806 | WB (t->type_common.lang_flag_6); |
| 5807 | WB (t->type_common.typeless_storage); |
| 5808 | } |
| 5809 | |
| 5810 | if (TREE_CODE_CLASS (code) != tcc_declaration) |
| 5811 | return; |
| 5812 | |
| 5813 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| 5814 | { |
| 5815 | WB (t->decl_common.nonlocal_flag); |
| 5816 | WB (t->decl_common.virtual_flag); |
| 5817 | WB (t->decl_common.ignored_flag); |
| 5818 | WB (t->decl_common.abstract_flag); |
| 5819 | WB (t->decl_common.artificial_flag); |
| 5820 | WB (t->decl_common.preserve_flag); |
| 5821 | WB (t->decl_common.debug_expr_is_from); |
| 5822 | WB (t->decl_common.lang_flag_0); |
| 5823 | WB (t->decl_common.lang_flag_1); |
| 5824 | WB (t->decl_common.lang_flag_2); |
| 5825 | WB (t->decl_common.lang_flag_3); |
| 5826 | WB (t->decl_common.lang_flag_4); |
| 5827 | |
| 5828 | { |
| 5829 | /* This is DECL_INTERFACE_KNOWN: We should redetermine whether |
| 5830 | we need to import or export any vague-linkage entities on |
| 5831 | stream-in. */ |
| 5832 | bool interface_known = t->decl_common.lang_flag_5; |
| 5833 | if (interface_known |
| 5834 | && get_importer_interface (decl: t) == importer_interface::unknown) |
| 5835 | interface_known = false; |
| 5836 | WB (interface_known); |
| 5837 | } |
| 5838 | |
| 5839 | WB (t->decl_common.lang_flag_6); |
| 5840 | WB (t->decl_common.lang_flag_7); |
| 5841 | WB (t->decl_common.lang_flag_8); |
| 5842 | WB (t->decl_common.decl_flag_0); |
| 5843 | |
| 5844 | { |
| 5845 | /* DECL_EXTERNAL -> decl_flag_1 |
| 5846 | == it is defined elsewhere |
| 5847 | DECL_NOT_REALLY_EXTERN -> base.not_really_extern |
| 5848 | == that was a lie, it is here */ |
| 5849 | |
| 5850 | bool is_external = t->decl_common.decl_flag_1; |
| 5851 | /* maybe_emit_vtables relies on vtables being marked as |
| 5852 | DECL_EXTERNAL and DECL_NOT_REALLY_EXTERN before processing. */ |
| 5853 | if (!is_external && VAR_P (t) && DECL_VTABLE_OR_VTT_P (t)) |
| 5854 | is_external = true; |
| 5855 | /* Things we emit here might well be external from the POV of an |
| 5856 | importer. */ |
| 5857 | if (!is_external |
| 5858 | && VAR_OR_FUNCTION_DECL_P (t) |
| 5859 | && get_importer_interface (decl: t) == importer_interface::external) |
| 5860 | is_external = true; |
| 5861 | WB (is_external); |
| 5862 | } |
| 5863 | |
| 5864 | WB (t->decl_common.decl_flag_2); |
| 5865 | WB (t->decl_common.decl_flag_3); |
| 5866 | WB (t->decl_common.not_gimple_reg_flag); |
| 5867 | WB (t->decl_common.decl_by_reference_flag); |
| 5868 | WB (t->decl_common.decl_read_flag); |
| 5869 | WB (t->decl_common.decl_nonshareable_flag); |
| 5870 | WB (t->decl_common.decl_not_flexarray); |
| 5871 | } |
| 5872 | else |
| 5873 | return; |
| 5874 | |
| 5875 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) |
| 5876 | { |
| 5877 | WB (t->decl_with_vis.defer_output); |
| 5878 | WB (t->decl_with_vis.hard_register); |
| 5879 | WB (t->decl_with_vis.common_flag); |
| 5880 | WB (t->decl_with_vis.in_text_section); |
| 5881 | WB (t->decl_with_vis.in_constant_pool); |
| 5882 | WB (t->decl_with_vis.dllimport_flag); |
| 5883 | WB (t->decl_with_vis.weak_flag); |
| 5884 | WB (t->decl_with_vis.seen_in_bind_expr); |
| 5885 | WB (t->decl_with_vis.comdat_flag); |
| 5886 | WB (t->decl_with_vis.visibility_specified); |
| 5887 | WB (t->decl_with_vis.init_priority_p); |
| 5888 | WB (t->decl_with_vis.shadowed_for_var_p); |
| 5889 | WB (t->decl_with_vis.cxx_constructor); |
| 5890 | WB (t->decl_with_vis.cxx_destructor); |
| 5891 | WB (t->decl_with_vis.final); |
| 5892 | WB (t->decl_with_vis.regdecl_flag); |
| 5893 | } |
| 5894 | else |
| 5895 | return; |
| 5896 | |
| 5897 | if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL)) |
| 5898 | { |
| 5899 | WB (t->function_decl.static_ctor_flag); |
| 5900 | WB (t->function_decl.static_dtor_flag); |
| 5901 | WB (t->function_decl.uninlinable); |
| 5902 | WB (t->function_decl.possibly_inlined); |
| 5903 | WB (t->function_decl.novops_flag); |
| 5904 | WB (t->function_decl.returns_twice_flag); |
| 5905 | WB (t->function_decl.malloc_flag); |
| 5906 | WB (t->function_decl.declared_inline_flag); |
| 5907 | WB (t->function_decl.no_inline_warning_flag); |
| 5908 | WB (t->function_decl.no_instrument_function_entry_exit); |
| 5909 | WB (t->function_decl.no_limit_stack); |
| 5910 | WB (t->function_decl.disregard_inline_limits); |
| 5911 | WB (t->function_decl.pure_flag); |
| 5912 | WB (t->function_decl.looping_const_or_pure_flag); |
| 5913 | |
| 5914 | WB (t->function_decl.has_debug_args_flag); |
| 5915 | WB (t->function_decl.versioned_function); |
| 5916 | WB (t->function_decl.replaceable_operator); |
| 5917 | |
| 5918 | /* decl_type is a (misnamed) 2 bit discriminator. */ |
| 5919 | unsigned kind = (unsigned)t->function_decl.decl_type; |
| 5920 | WB ((kind >> 0) & 1); |
| 5921 | WB ((kind >> 1) & 1); |
| 5922 | } |
| 5923 | #undef WB_IF |
| 5924 | #undef WB |
| 5925 | } |
| 5926 | |
/* Read the core boolean flags of T.  The number and order of bits
   read here must mirror trees_out::core_bools exactly, including the
   dummy bit consumed by RB_IF when COND is false.  Returns false on
   stream overrun.  */

bool
trees_in::core_bools (tree t, bits_in& bits)
{
#define RB(X) ((X) = bits.b ())
/* See the comment for WB_IF in trees_out::core_bools.  */
#define RB_IF(COND, X) ((COND) ? RB (X) : bits.b ())

  tree_code code = TREE_CODE (t);

  RB (t->base.side_effects_flag);
  RB (t->base.constant_flag);
  RB (t->base.addressable_flag);
  RB (t->base.volatile_flag);
  RB (t->base.readonly_flag);
  /* base.asm_written_flag is not streamed.  */
  RB (t->base.nowarning_flag);
  /* base.visited is not streamed.  */
  /* base.used_flag is not streamed.  */
  RB (t->base.nothrow_flag);
  RB (t->base.static_flag);
  RB_IF (TREE_CODE_CLASS (code) != tcc_type, t->base.public_flag);
  RB (t->base.private_flag);
  RB (t->base.protected_flag);
  RB (t->base.deprecated_flag);
  RB (t->base.default_def_flag);

  switch (code)
    {
    case CALL_EXPR:
    case INTEGER_CST:
    case SSA_NAME:
    case TARGET_MEM_REF:
    case TREE_VEC:
      /* These use different base.u fields.  */
      goto done;

    default:
      RB (t->base.u.bits.lang_flag_0);
      RB (t->base.u.bits.lang_flag_1);
      RB (t->base.u.bits.lang_flag_2);
      RB (t->base.u.bits.lang_flag_3);
      RB (t->base.u.bits.lang_flag_4);
      RB (t->base.u.bits.lang_flag_5);
      RB (t->base.u.bits.lang_flag_6);
      RB (t->base.u.bits.saturating_flag);
      RB (t->base.u.bits.unsigned_flag);
      RB (t->base.u.bits.packed_flag);
      RB (t->base.u.bits.user_align);
      RB (t->base.u.bits.nameless_flag);
      RB (t->base.u.bits.atomic_flag);
      RB (t->base.u.bits.unavailable_flag);
      break;
    }

  if (TREE_CODE_CLASS (code) == tcc_type)
    {
      RB (t->type_common.no_force_blk_flag);
      RB (t->type_common.needs_constructing_flag);
      RB (t->type_common.transparent_aggr_flag);
      RB (t->type_common.restrict_flag);
      RB (t->type_common.string_flag);
      RB (t->type_common.lang_flag_0);
      RB (t->type_common.lang_flag_1);
      RB (t->type_common.lang_flag_2);
      RB (t->type_common.lang_flag_3);
      RB (t->type_common.lang_flag_4);
      RB (t->type_common.lang_flag_5);
      RB (t->type_common.lang_flag_6);
      RB (t->type_common.typeless_storage);
    }

  if (TREE_CODE_CLASS (code) != tcc_declaration)
    goto done;

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      RB (t->decl_common.nonlocal_flag);
      RB (t->decl_common.virtual_flag);
      RB (t->decl_common.ignored_flag);
      RB (t->decl_common.abstract_flag);
      RB (t->decl_common.artificial_flag);
      RB (t->decl_common.preserve_flag);
      RB (t->decl_common.debug_expr_is_from);
      RB (t->decl_common.lang_flag_0);
      RB (t->decl_common.lang_flag_1);
      RB (t->decl_common.lang_flag_2);
      RB (t->decl_common.lang_flag_3);
      RB (t->decl_common.lang_flag_4);
      RB (t->decl_common.lang_flag_5);
      RB (t->decl_common.lang_flag_6);
      RB (t->decl_common.lang_flag_7);
      RB (t->decl_common.lang_flag_8);
      RB (t->decl_common.decl_flag_0);
      RB (t->decl_common.decl_flag_1);
      RB (t->decl_common.decl_flag_2);
      RB (t->decl_common.decl_flag_3);
      RB (t->decl_common.not_gimple_reg_flag);
      RB (t->decl_common.decl_by_reference_flag);
      RB (t->decl_common.decl_read_flag);
      RB (t->decl_common.decl_nonshareable_flag);
      RB (t->decl_common.decl_not_flexarray);
    }
  else
    goto done;

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      RB (t->decl_with_vis.defer_output);
      RB (t->decl_with_vis.hard_register);
      RB (t->decl_with_vis.common_flag);
      RB (t->decl_with_vis.in_text_section);
      RB (t->decl_with_vis.in_constant_pool);
      RB (t->decl_with_vis.dllimport_flag);
      RB (t->decl_with_vis.weak_flag);
      RB (t->decl_with_vis.seen_in_bind_expr);
      RB (t->decl_with_vis.comdat_flag);
      RB (t->decl_with_vis.visibility_specified);
      RB (t->decl_with_vis.init_priority_p);
      RB (t->decl_with_vis.shadowed_for_var_p);
      RB (t->decl_with_vis.cxx_constructor);
      RB (t->decl_with_vis.cxx_destructor);
      RB (t->decl_with_vis.final);
      RB (t->decl_with_vis.regdecl_flag);
    }
  else
    goto done;

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      RB (t->function_decl.static_ctor_flag);
      RB (t->function_decl.static_dtor_flag);
      RB (t->function_decl.uninlinable);
      RB (t->function_decl.possibly_inlined);
      RB (t->function_decl.novops_flag);
      RB (t->function_decl.returns_twice_flag);
      RB (t->function_decl.malloc_flag);
      RB (t->function_decl.declared_inline_flag);
      RB (t->function_decl.no_inline_warning_flag);
      RB (t->function_decl.no_instrument_function_entry_exit);
      RB (t->function_decl.no_limit_stack);
      RB (t->function_decl.disregard_inline_limits);
      RB (t->function_decl.pure_flag);
      RB (t->function_decl.looping_const_or_pure_flag);

      RB (t->function_decl.has_debug_args_flag);
      RB (t->function_decl.versioned_function);
      RB (t->function_decl.replaceable_operator);

      /* decl_type is a (misnamed) 2 bit discriminator.  */
      unsigned kind = 0;
      kind |= unsigned (bits.b ()) << 0;
      kind |= unsigned (bits.b ()) << 1;
      t->function_decl.decl_type = function_decl_type (kind);
    }
#undef RB_IF
#undef RB
 done:
  return !get_overrun ();
}
| 6086 | |
/* Write the boolean flags of T's lang_decl.  Must be kept in sync
   with trees_in::lang_decl_bools, bit for bit.  */

void
trees_out::lang_decl_bools (tree t, bits_out& bits)
{
#define WB(X) (bits.b (X))
  const struct lang_decl *lang = DECL_LANG_SPECIFIC (t);

  bits.bflush ();
  WB (lang->u.base.language == lang_cplusplus);
  WB ((lang->u.base.use_template >> 0) & 1);
  WB ((lang->u.base.use_template >> 1) & 1);
  /* Do not write lang->u.base.not_really_extern, importer will set
     when reading the definition (if any).  */
  WB (lang->u.base.initialized_in_class);

  WB (lang->u.base.threadprivate_or_deleted_p);
  WB (lang->u.base.anticipated_p);
  WB (lang->u.base.friend_or_tls);
  WB (lang->u.base.unknown_bound_p);
  /* Do not write lang->u.base.odr_used, importer will recalculate if
     they do ODR use this decl.  */
  WB (lang->u.base.concept_p);
  WB (lang->u.base.var_declared_inline_p);
  WB (lang->u.base.dependent_init_p);

  /* When building a header unit, everything is marked as purview, (so
     we know which decls to write).  But when we import them we do not
     want to mark them as in module purview.  */
  WB (lang->u.base.module_purview_p && !header_module_p ());
  WB (lang->u.base.module_attach_p);
  /* Importer will set module_import_p and module_entity_p themselves
     as appropriate.  */
  WB (lang->u.base.module_keyed_decls_p);

  WB (lang->u.base.omp_declare_mapper_p);

  /* The selector determines which union member is live, and hence
     which (if any) extra flags follow.  */
  switch (lang->u.base.selector)
    {
    default:
      gcc_unreachable ();

    case lds_fn:  /* lang_decl_fn.  */
      WB (lang->u.fn.global_ctor_p);
      WB (lang->u.fn.global_dtor_p);

      WB (lang->u.fn.static_function);
      WB (lang->u.fn.pure_virtual);
      WB (lang->u.fn.defaulted_p);
      WB (lang->u.fn.has_in_charge_parm_p);
      WB (lang->u.fn.has_vtt_parm_p);
      /* There shouldn't be a pending inline at this point.  */
      gcc_assert (!lang->u.fn.pending_inline_p);
      WB (lang->u.fn.nonconverting);
      WB (lang->u.fn.thunk_p);

      WB (lang->u.fn.this_thunk_p);
      WB (lang->u.fn.omp_declare_reduction_p);
      WB (lang->u.fn.has_dependent_explicit_spec_p);
      WB (lang->u.fn.immediate_fn_p);
      WB (lang->u.fn.maybe_deleted);
      WB (lang->u.fn.coroutine_p);
      WB (lang->u.fn.implicit_constexpr);
      WB (lang->u.fn.escalated_p);
      WB (lang->u.fn.xobj_func);
      goto lds_min;

    case lds_decomp:  /* lang_decl_decomp.  */
      /* No bools.  */
      goto lds_min;

    case lds_min:  /* lang_decl_min.  */
    lds_min:
      /* No bools.  */
      break;

    case lds_ns:  /* lang_decl_ns.  */
      /* No bools.  */
      break;

    case lds_parm:  /* lang_decl_parm.  */
      /* No bools.  */
      break;
    }
#undef WB
}
| 6171 | |
/* Read the boolean flags of T's lang_decl.  Mirrors
   trees_out::lang_decl_bools, bit for bit.  Returns false on stream
   overrun.  */

bool
trees_in::lang_decl_bools (tree t, bits_in& bits)
{
#define RB(X) ((X) = bits.b ())
  struct lang_decl *lang = DECL_LANG_SPECIFIC (t);

  bits.bflush ();
  lang->u.base.language = bits.b () ? lang_cplusplus : lang_c;
  unsigned v;
  v = bits.b () << 0;
  v |= bits.b () << 1;
  lang->u.base.use_template = v;
  /* lang->u.base.not_really_extern is not streamed.  */
  RB (lang->u.base.initialized_in_class);

  RB (lang->u.base.threadprivate_or_deleted_p);
  RB (lang->u.base.anticipated_p);
  RB (lang->u.base.friend_or_tls);
  RB (lang->u.base.unknown_bound_p);
  /* lang->u.base.odr_used is not streamed.  */
  RB (lang->u.base.concept_p);
  RB (lang->u.base.var_declared_inline_p);
  RB (lang->u.base.dependent_init_p);

  RB (lang->u.base.module_purview_p);
  RB (lang->u.base.module_attach_p);
  /* module_import_p and module_entity_p are not streamed.  */
  RB (lang->u.base.module_keyed_decls_p);

  RB (lang->u.base.omp_declare_mapper_p);

  /* The selector (already set on T) determines which union member's
     extra flags (if any) follow.  */
  switch (lang->u.base.selector)
    {
    default:
      gcc_unreachable ();

    case lds_fn:  /* lang_decl_fn.  */
      RB (lang->u.fn.global_ctor_p);
      RB (lang->u.fn.global_dtor_p);

      RB (lang->u.fn.static_function);
      RB (lang->u.fn.pure_virtual);
      RB (lang->u.fn.defaulted_p);
      RB (lang->u.fn.has_in_charge_parm_p);
      RB (lang->u.fn.has_vtt_parm_p);
      /* lang->u.fn.pending_inline_p is not streamed.  */
      RB (lang->u.fn.nonconverting);
      RB (lang->u.fn.thunk_p);

      RB (lang->u.fn.this_thunk_p);
      RB (lang->u.fn.omp_declare_reduction_p);
      RB (lang->u.fn.has_dependent_explicit_spec_p);
      RB (lang->u.fn.immediate_fn_p);
      RB (lang->u.fn.maybe_deleted);
      RB (lang->u.fn.coroutine_p);
      RB (lang->u.fn.implicit_constexpr);
      RB (lang->u.fn.escalated_p);
      RB (lang->u.fn.xobj_func);
      goto lds_min;

    case lds_decomp:  /* lang_decl_decomp.  */
      /* No bools.  */
      goto lds_min;

    case lds_min:  /* lang_decl_min.  */
    lds_min:
      /* No bools.  */
      break;

    case lds_ns:  /* lang_decl_ns.  */
      /* No bools.  */
      break;

    case lds_parm:  /* lang_decl_parm.  */
      /* No bools.  */
      break;
    }
#undef RB
  return !get_overrun ();
}
| 6252 | |
/* Write the boolean flags of T's lang_type.  Must be kept in sync
   with trees_in::lang_type_bools, bit for bit.  */

void
trees_out::lang_type_bools (tree t, bits_out& bits)
{
#define WB(X) (bits.b (X))
  const struct lang_type *lang = TYPE_LANG_SPECIFIC (t);

  bits.bflush ();
  WB (lang->has_type_conversion);
  WB (lang->has_copy_ctor);
  WB (lang->has_default_ctor);
  WB (lang->const_needs_init);
  WB (lang->ref_needs_init);
  WB (lang->has_const_copy_assign);
  /* use_template is a 2-bit field, streamed as two bits.  */
  WB ((lang->use_template >> 0) & 1);
  WB ((lang->use_template >> 1) & 1);

  WB (lang->has_mutable);
  WB (lang->com_interface);
  WB (lang->non_pod_class);
  WB (lang->nearly_empty_p);
  WB (lang->user_align);
  WB (lang->has_copy_assign);
  WB (lang->has_new);
  WB (lang->has_array_new);

  /* gets_delete is a 2-bit field, streamed as two bits.  */
  WB ((lang->gets_delete >> 0) & 1);
  WB ((lang->gets_delete >> 1) & 1);
  WB (lang->interface_only);
  WB (lang->interface_unknown);
  WB (lang->contains_empty_class_p);
  WB (lang->anon_aggr);
  WB (lang->non_zero_init);
  WB (lang->empty_p);

  WB (lang->vec_new_uses_cookie);
  WB (lang->declared_class);
  WB (lang->diamond_shaped);
  WB (lang->repeated_base);
  gcc_checking_assert (!lang->being_defined);
  // lang->debug_requested
  WB (lang->fields_readonly);
  WB (lang->ptrmemfunc_flag);

  WB (lang->lazy_default_ctor);
  WB (lang->lazy_copy_ctor);
  WB (lang->lazy_copy_assign);
  WB (lang->lazy_destructor);
  WB (lang->has_const_copy_ctor);
  WB (lang->has_complex_copy_ctor);
  WB (lang->has_complex_copy_assign);
  WB (lang->non_aggregate);

  WB (lang->has_complex_dflt);
  WB (lang->has_list_ctor);
  WB (lang->non_std_layout);
  WB (lang->is_literal);
  WB (lang->lazy_move_ctor);
  WB (lang->lazy_move_assign);
  WB (lang->has_complex_move_ctor);
  WB (lang->has_complex_move_assign);

  WB (lang->has_constexpr_ctor);
  WB (lang->unique_obj_representations);
  WB (lang->unique_obj_representations_set);
  gcc_checking_assert (!lang->erroneous);
  WB (lang->non_pod_aggregate);
  WB (lang->non_aggregate_pod);
#undef WB
}
| 6322 | |
/* Read the boolean flags of T's lang_type.  Mirrors
   trees_out::lang_type_bools, bit for bit.  Returns false on stream
   overrun.  */

bool
trees_in::lang_type_bools (tree t, bits_in& bits)
{
#define RB(X) ((X) = bits.b ())
  struct lang_type *lang = TYPE_LANG_SPECIFIC (t);

  bits.bflush ();
  RB (lang->has_type_conversion);
  RB (lang->has_copy_ctor);
  RB (lang->has_default_ctor);
  RB (lang->const_needs_init);
  RB (lang->ref_needs_init);
  RB (lang->has_const_copy_assign);
  /* use_template is a 2-bit field, reassembled from two bits.  */
  unsigned v;
  v = bits.b () << 0;
  v |= bits.b () << 1;
  lang->use_template = v;

  RB (lang->has_mutable);
  RB (lang->com_interface);
  RB (lang->non_pod_class);
  RB (lang->nearly_empty_p);
  RB (lang->user_align);
  RB (lang->has_copy_assign);
  RB (lang->has_new);
  RB (lang->has_array_new);

  /* gets_delete is a 2-bit field, reassembled from two bits.  */
  v = bits.b () << 0;
  v |= bits.b () << 1;
  lang->gets_delete = v;
  RB (lang->interface_only);
  RB (lang->interface_unknown);
  RB (lang->contains_empty_class_p);
  RB (lang->anon_aggr);
  RB (lang->non_zero_init);
  RB (lang->empty_p);

  RB (lang->vec_new_uses_cookie);
  RB (lang->declared_class);
  RB (lang->diamond_shaped);
  RB (lang->repeated_base);
  gcc_checking_assert (!lang->being_defined);
  gcc_checking_assert (!lang->debug_requested);
  RB (lang->fields_readonly);
  RB (lang->ptrmemfunc_flag);

  RB (lang->lazy_default_ctor);
  RB (lang->lazy_copy_ctor);
  RB (lang->lazy_copy_assign);
  RB (lang->lazy_destructor);
  RB (lang->has_const_copy_ctor);
  RB (lang->has_complex_copy_ctor);
  RB (lang->has_complex_copy_assign);
  RB (lang->non_aggregate);

  RB (lang->has_complex_dflt);
  RB (lang->has_list_ctor);
  RB (lang->non_std_layout);
  RB (lang->is_literal);
  RB (lang->lazy_move_ctor);
  RB (lang->lazy_move_assign);
  RB (lang->has_complex_move_ctor);
  RB (lang->has_complex_move_assign);

  RB (lang->has_constexpr_ctor);
  RB (lang->unique_obj_representations);
  RB (lang->unique_obj_representations_set);
  gcc_checking_assert (!lang->erroneous);
  RB (lang->non_pod_aggregate);
  RB (lang->non_aggregate_pod);
#undef RB
  return !get_overrun ();
}
| 6396 | |
| 6397 | /* Read & write the core values and pointers. */ |
| 6398 | |
| 6399 | void |
| 6400 | trees_out::core_vals (tree t) |
| 6401 | { |
| 6402 | #define WU(X) (u (X)) |
| 6403 | #define WT(X) (tree_node (X)) |
| 6404 | tree_code code = TREE_CODE (t); |
| 6405 | |
| 6406 | /* First by shape of the tree. */ |
| 6407 | |
| 6408 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL)) |
| 6409 | { |
| 6410 | /* Write this early, for better log information. */ |
| 6411 | WT (t->decl_minimal.name); |
| 6412 | if (!DECL_TEMPLATE_PARM_P (t)) |
| 6413 | WT (t->decl_minimal.context); |
| 6414 | |
| 6415 | if (state) |
| 6416 | state->write_location (*this, t->decl_minimal.locus); |
| 6417 | |
| 6418 | if (streaming_p ()) |
| 6419 | if (has_warning_spec (t)) |
| 6420 | u (v: get_warning_spec (t)); |
| 6421 | } |
| 6422 | |
| 6423 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) |
| 6424 | { |
| 6425 | /* The only types we write also have TYPE_NON_COMMON. */ |
| 6426 | gcc_checking_assert (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON)); |
| 6427 | |
| 6428 | /* We only stream the main variant. */ |
| 6429 | gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t); |
| 6430 | |
| 6431 | /* Stream the name & context first, for better log information */ |
| 6432 | WT (t->type_common.name); |
| 6433 | WT (t->type_common.context); |
| 6434 | |
| 6435 | /* By construction we want to make sure we have the canonical |
| 6436 | and main variants already in the type table, so emit them |
| 6437 | now. */ |
| 6438 | WT (t->type_common.main_variant); |
| 6439 | |
| 6440 | tree canonical = t->type_common.canonical; |
| 6441 | if (canonical && DECL_TEMPLATE_PARM_P (TYPE_NAME (t))) |
| 6442 | /* We do not want to wander into different templates. |
| 6443 | Reconstructed on stream in. */ |
| 6444 | canonical = t; |
| 6445 | WT (canonical); |
| 6446 | |
| 6447 | /* type_common.next_variant is internally manipulated. */ |
| 6448 | /* type_common.pointer_to, type_common.reference_to. */ |
| 6449 | |
| 6450 | if (streaming_p ()) |
| 6451 | { |
| 6452 | WU (t->type_common.precision); |
| 6453 | WU (t->type_common.contains_placeholder_bits); |
| 6454 | WU (t->type_common.mode); |
| 6455 | WU (t->type_common.align); |
| 6456 | } |
| 6457 | |
| 6458 | if (!RECORD_OR_UNION_CODE_P (code)) |
| 6459 | { |
| 6460 | WT (t->type_common.size); |
| 6461 | WT (t->type_common.size_unit); |
| 6462 | } |
| 6463 | WT (t->type_common.attributes); |
| 6464 | |
| 6465 | WT (t->type_common.common.chain); /* TYPE_STUB_DECL. */ |
| 6466 | } |
| 6467 | |
| 6468 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| 6469 | { |
| 6470 | if (streaming_p ()) |
| 6471 | { |
| 6472 | WU (t->decl_common.mode); |
| 6473 | WU (t->decl_common.off_align); |
| 6474 | WU (t->decl_common.align); |
| 6475 | } |
| 6476 | |
| 6477 | /* For templates these hold instantiation (partial and/or |
| 6478 | specialization) information. */ |
| 6479 | if (code != TEMPLATE_DECL) |
| 6480 | { |
| 6481 | WT (t->decl_common.size); |
| 6482 | WT (t->decl_common.size_unit); |
| 6483 | } |
| 6484 | |
| 6485 | WT (t->decl_common.attributes); |
| 6486 | // FIXME: Does this introduce cross-decl links? For instance |
| 6487 | // from instantiation to the template. If so, we'll need more |
| 6488 | // deduplication logic. I think we'll need to walk the blocks |
| 6489 | // of the owning function_decl's abstract origin in tandem, to |
| 6490 | // generate the locating data needed? |
| 6491 | WT (t->decl_common.abstract_origin); |
| 6492 | } |
| 6493 | |
| 6494 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) |
| 6495 | { |
| 6496 | WT (t->decl_with_vis.assembler_name); |
| 6497 | if (streaming_p ()) |
| 6498 | WU (t->decl_with_vis.visibility); |
| 6499 | } |
| 6500 | |
| 6501 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON)) |
| 6502 | { |
| 6503 | if (code == ENUMERAL_TYPE) |
| 6504 | { |
| 6505 | /* These fields get set even for opaque enums that lack a |
| 6506 | definition, so we stream them directly for each ENUMERAL_TYPE. |
| 6507 | We stream TYPE_VALUES as part of the definition. */ |
| 6508 | WT (t->type_non_common.maxval); |
| 6509 | WT (t->type_non_common.minval); |
| 6510 | } |
| 6511 | /* Records and unions hold FIELDS, VFIELD & BINFO on these |
| 6512 | things. */ |
| 6513 | else if (!RECORD_OR_UNION_CODE_P (code)) |
| 6514 | { |
| 6515 | // FIXME: These are from tpl_parm_value's 'type' writing. |
| 6516 | // Perhaps it should just be doing them directly? |
| 6517 | gcc_checking_assert (code == TEMPLATE_TYPE_PARM |
| 6518 | || code == TEMPLATE_TEMPLATE_PARM |
| 6519 | || code == BOUND_TEMPLATE_TEMPLATE_PARM); |
| 6520 | gcc_checking_assert (!TYPE_CACHED_VALUES_P (t)); |
| 6521 | WT (t->type_non_common.values); |
| 6522 | WT (t->type_non_common.maxval); |
| 6523 | WT (t->type_non_common.minval); |
| 6524 | } |
| 6525 | |
| 6526 | WT (t->type_non_common.lang_1); |
| 6527 | } |
| 6528 | |
| 6529 | if (CODE_CONTAINS_STRUCT (code, TS_EXP)) |
| 6530 | { |
| 6531 | if (state) |
| 6532 | state->write_location (*this, t->exp.locus); |
| 6533 | |
| 6534 | if (streaming_p ()) |
| 6535 | if (has_warning_spec (t)) |
| 6536 | u (v: get_warning_spec (t)); |
| 6537 | |
| 6538 | bool vl = TREE_CODE_CLASS (code) == tcc_vl_exp; |
| 6539 | unsigned limit = (vl ? VL_EXP_OPERAND_LENGTH (t) |
| 6540 | : TREE_OPERAND_LENGTH (t)); |
| 6541 | unsigned ix = unsigned (vl); |
| 6542 | if (code == REQUIRES_EXPR) |
| 6543 | { |
| 6544 | /* The first operand of a REQUIRES_EXPR is a tree chain |
| 6545 | of PARM_DECLs. We need to stream this separately as |
| 6546 | otherwise we would only stream the first one. */ |
| 6547 | chained_decls (REQUIRES_EXPR_PARMS (t)); |
| 6548 | ++ix; |
| 6549 | } |
| 6550 | for (; ix != limit; ix++) |
| 6551 | WT (TREE_OPERAND (t, ix)); |
| 6552 | } |
| 6553 | else |
| 6554 | /* The CODE_CONTAINS tables were inaccurate when I started. */ |
| 6555 | gcc_checking_assert (TREE_CODE_CLASS (code) != tcc_expression |
| 6556 | && TREE_CODE_CLASS (code) != tcc_binary |
| 6557 | && TREE_CODE_CLASS (code) != tcc_unary |
| 6558 | && TREE_CODE_CLASS (code) != tcc_reference |
| 6559 | && TREE_CODE_CLASS (code) != tcc_comparison |
| 6560 | && TREE_CODE_CLASS (code) != tcc_statement |
| 6561 | && TREE_CODE_CLASS (code) != tcc_vl_exp); |
| 6562 | |
| 6563 | /* Then by CODE. Special cases and/or 1:1 tree shape |
| 6564 | correspondance. */ |
| 6565 | switch (code) |
| 6566 | { |
| 6567 | default: |
| 6568 | break; |
| 6569 | |
| 6570 | case ARGUMENT_PACK_SELECT: /* Transient during instantiation. */ |
| 6571 | case DEFERRED_PARSE: /* Expanded upon completion of |
| 6572 | outermost class. */ |
| 6573 | case IDENTIFIER_NODE: /* Streamed specially. */ |
| 6574 | case BINDING_VECTOR: /* Only in namespace-scope symbol |
| 6575 | table. */ |
| 6576 | case SSA_NAME: |
| 6577 | case TRANSLATION_UNIT_DECL: /* There is only one, it is a |
| 6578 | global_tree. */ |
| 6579 | case USERDEF_LITERAL: /* Expanded during parsing. */ |
| 6580 | gcc_unreachable (); /* Should never meet. */ |
| 6581 | |
| 6582 | /* Constants. */ |
| 6583 | case COMPLEX_CST: |
| 6584 | WT (TREE_REALPART (t)); |
| 6585 | WT (TREE_IMAGPART (t)); |
| 6586 | break; |
| 6587 | |
| 6588 | case FIXED_CST: |
| 6589 | gcc_unreachable (); /* Not supported in C++. */ |
| 6590 | |
| 6591 | case INTEGER_CST: |
| 6592 | if (streaming_p ()) |
| 6593 | { |
| 6594 | unsigned num = TREE_INT_CST_EXT_NUNITS (t); |
| 6595 | for (unsigned ix = 0; ix != num; ix++) |
| 6596 | wu (TREE_INT_CST_ELT (t, ix)); |
| 6597 | } |
| 6598 | break; |
| 6599 | |
| 6600 | case POLY_INT_CST: |
| 6601 | if (streaming_p ()) |
| 6602 | for (unsigned ix = 0; ix != NUM_POLY_INT_COEFFS; ix++) |
| 6603 | WT (POLY_INT_CST_COEFF (t, ix)); |
| 6604 | break; |
| 6605 | |
| 6606 | case REAL_CST: |
| 6607 | if (streaming_p ()) |
| 6608 | buf (TREE_REAL_CST_PTR (t), len: sizeof (real_value)); |
| 6609 | break; |
| 6610 | |
| 6611 | case STRING_CST: |
| 6612 | /* Streamed during start. */ |
| 6613 | break; |
| 6614 | |
| 6615 | case RAW_DATA_CST: |
| 6616 | if (RAW_DATA_OWNER (t) == NULL_TREE) |
| 6617 | break; /* Streamed as STRING_CST during start. */ |
| 6618 | WT (RAW_DATA_OWNER (t)); |
| 6619 | if (streaming_p ()) |
| 6620 | { |
| 6621 | if (TREE_CODE (RAW_DATA_OWNER (t)) == RAW_DATA_CST) |
| 6622 | z (RAW_DATA_POINTER (t) - RAW_DATA_POINTER (RAW_DATA_OWNER (t))); |
| 6623 | else if (TREE_CODE (RAW_DATA_OWNER (t)) == STRING_CST) |
| 6624 | z (RAW_DATA_POINTER (t) |
| 6625 | - TREE_STRING_POINTER (RAW_DATA_OWNER (t))); |
| 6626 | else |
| 6627 | gcc_unreachable (); |
| 6628 | } |
| 6629 | break; |
| 6630 | |
| 6631 | case VECTOR_CST: |
| 6632 | for (unsigned ix = vector_cst_encoded_nelts (t); ix--;) |
| 6633 | WT (VECTOR_CST_ENCODED_ELT (t, ix)); |
| 6634 | break; |
| 6635 | |
| 6636 | /* Decls. */ |
| 6637 | case VAR_DECL: |
| 6638 | if (DECL_CONTEXT (t) |
| 6639 | && TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL) |
| 6640 | { |
| 6641 | if (DECL_HAS_VALUE_EXPR_P (t)) |
| 6642 | WT (DECL_VALUE_EXPR (t)); |
| 6643 | break; |
| 6644 | } |
| 6645 | /* FALLTHROUGH */ |
| 6646 | |
| 6647 | case RESULT_DECL: |
| 6648 | case PARM_DECL: |
| 6649 | if (DECL_HAS_VALUE_EXPR_P (t)) |
| 6650 | WT (DECL_VALUE_EXPR (t)); |
| 6651 | /* FALLTHROUGH */ |
| 6652 | |
| 6653 | case CONST_DECL: |
| 6654 | case IMPORTED_DECL: |
| 6655 | WT (t->decl_common.initial); |
| 6656 | break; |
| 6657 | |
| 6658 | case FIELD_DECL: |
| 6659 | WT (t->field_decl.offset); |
| 6660 | WT (t->field_decl.bit_field_type); |
| 6661 | { |
| 6662 | auto ovr = make_temp_override (var&: walking_bit_field_unit, overrider: true); |
| 6663 | WT (t->field_decl.qualifier); /* bitfield unit. */ |
| 6664 | } |
| 6665 | WT (t->field_decl.bit_offset); |
| 6666 | WT (t->field_decl.fcontext); |
| 6667 | WT (t->decl_common.initial); |
| 6668 | break; |
| 6669 | |
| 6670 | case LABEL_DECL: |
| 6671 | if (streaming_p ()) |
| 6672 | { |
| 6673 | WU (t->label_decl.label_decl_uid); |
| 6674 | WU (t->label_decl.eh_landing_pad_nr); |
| 6675 | } |
| 6676 | break; |
| 6677 | |
| 6678 | case FUNCTION_DECL: |
| 6679 | if (streaming_p ()) |
| 6680 | { |
| 6681 | /* Builtins can be streamed by value when a header declares |
| 6682 | them. */ |
| 6683 | WU (DECL_BUILT_IN_CLASS (t)); |
| 6684 | if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN) |
| 6685 | WU (DECL_UNCHECKED_FUNCTION_CODE (t)); |
| 6686 | } |
| 6687 | |
| 6688 | WT (t->function_decl.personality); |
| 6689 | /* Rather than streaming target/optimize nodes, we should reconstruct |
| 6690 | them on stream-in from any attributes applied to the function. */ |
| 6691 | if (streaming_p () && t->function_decl.function_specific_target) |
| 6692 | warning_at (DECL_SOURCE_LOCATION (t), 0, |
| 6693 | "%<target%> attribute currently unsupported in modules" ); |
| 6694 | if (streaming_p () && t->function_decl.function_specific_optimization) |
| 6695 | warning_at (DECL_SOURCE_LOCATION (t), 0, |
| 6696 | "%<optimize%> attribute currently unsupported in modules" ); |
| 6697 | WT (t->function_decl.vindex); |
| 6698 | |
| 6699 | if (DECL_HAS_DEPENDENT_EXPLICIT_SPEC_P (t)) |
| 6700 | WT (lookup_explicit_specifier (t)); |
| 6701 | break; |
| 6702 | |
| 6703 | case USING_DECL: |
| 6704 | /* USING_DECL_DECLS */ |
| 6705 | WT (t->decl_common.initial); |
| 6706 | /* FALLTHROUGH */ |
| 6707 | |
| 6708 | case TYPE_DECL: |
| 6709 | /* USING_DECL: USING_DECL_SCOPE */ |
| 6710 | /* TYPE_DECL: DECL_ORIGINAL_TYPE */ |
| 6711 | WT (t->decl_non_common.result); |
| 6712 | break; |
| 6713 | |
| 6714 | /* Miscellaneous common nodes. */ |
| 6715 | case BLOCK: |
| 6716 | if (state) |
| 6717 | { |
| 6718 | state->write_location (*this, t->block.locus); |
| 6719 | state->write_location (*this, t->block.end_locus); |
| 6720 | } |
| 6721 | |
| 6722 | /* DECL_LOCAL_DECL_P decls are first encountered here and |
| 6723 | streamed by value. */ |
| 6724 | for (tree decls = t->block.vars; decls; decls = DECL_CHAIN (decls)) |
| 6725 | { |
| 6726 | if (VAR_OR_FUNCTION_DECL_P (decls) |
| 6727 | && DECL_LOCAL_DECL_P (decls)) |
| 6728 | { |
| 6729 | /* Make sure this is the first encounter, and mark for |
| 6730 | walk-by-value. */ |
| 6731 | gcc_checking_assert (!TREE_VISITED (decls) |
| 6732 | && !DECL_TEMPLATE_INFO (decls)); |
| 6733 | mark_by_value (decl: decls); |
| 6734 | } |
| 6735 | tree_node (decls); |
| 6736 | } |
| 6737 | tree_node (NULL_TREE); |
| 6738 | |
| 6739 | /* nonlocalized_vars is a middle-end thing. */ |
| 6740 | WT (t->block.subblocks); |
| 6741 | WT (t->block.supercontext); |
| 6742 | // FIXME: As for decl's abstract_origin, does this introduce crosslinks? |
| 6743 | WT (t->block.abstract_origin); |
| 6744 | /* fragment_origin, fragment_chain are middle-end things. */ |
| 6745 | WT (t->block.chain); |
| 6746 | /* nonlocalized_vars, block_num & die are middle endy/debug |
| 6747 | things. */ |
| 6748 | break; |
| 6749 | |
| 6750 | case CALL_EXPR: |
| 6751 | if (streaming_p ()) |
| 6752 | WU (t->base.u.ifn); |
| 6753 | break; |
| 6754 | |
| 6755 | case CONSTRUCTOR: |
| 6756 | // This must be streamed /after/ we've streamed the type, |
| 6757 | // because it can directly refer to elements of the type. Eg, |
| 6758 | // FIELD_DECLs of a RECORD_TYPE. |
| 6759 | break; |
| 6760 | |
| 6761 | case OMP_CLAUSE: |
| 6762 | { |
| 6763 | /* The ompcode is serialized in start. */ |
| 6764 | if (streaming_p ()) |
| 6765 | WU (t->omp_clause.subcode.map_kind); |
| 6766 | if (state) |
| 6767 | state->write_location (*this, t->omp_clause.locus); |
| 6768 | |
| 6769 | unsigned len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; |
| 6770 | for (unsigned ix = 0; ix != len; ix++) |
| 6771 | WT (t->omp_clause.ops[ix]); |
| 6772 | } |
| 6773 | break; |
| 6774 | |
| 6775 | case STATEMENT_LIST: |
| 6776 | for (tree stmt : tsi_range (t)) |
| 6777 | if (stmt) |
| 6778 | WT (stmt); |
| 6779 | WT (NULL_TREE); |
| 6780 | break; |
| 6781 | |
| 6782 | case OPTIMIZATION_NODE: |
| 6783 | case TARGET_OPTION_NODE: |
| 6784 | // FIXME: Our representation for these two nodes is a cache of |
| 6785 | // the resulting set of options. Not a record of the options |
| 6786 | // that got changed by a particular attribute or pragma. Instead |
| 6787 | // of recording that, we probably should just rebuild the options |
| 6788 | // on stream-in from the function attributes. This could introduce |
| 6789 | // strangeness if the importer has some incompatible set of flags |
| 6790 | // but we currently assume users "know what they're doing" in such |
| 6791 | // a case anyway. |
| 6792 | gcc_unreachable (); |
| 6793 | break; |
| 6794 | |
| 6795 | case TREE_BINFO: |
| 6796 | { |
| 6797 | WT (t->binfo.common.chain); |
| 6798 | WT (t->binfo.offset); |
| 6799 | WT (t->binfo.inheritance); |
| 6800 | WT (t->binfo.vptr_field); |
| 6801 | |
| 6802 | WT (t->binfo.vtable); |
| 6803 | WT (t->binfo.virtuals); |
| 6804 | WT (t->binfo.vtt_subvtt); |
| 6805 | WT (t->binfo.vtt_vptr); |
| 6806 | |
| 6807 | tree_vec (BINFO_BASE_ACCESSES (t)); |
| 6808 | unsigned num = vec_safe_length (BINFO_BASE_ACCESSES (t)); |
| 6809 | for (unsigned ix = 0; ix != num; ix++) |
| 6810 | WT (BINFO_BASE_BINFO (t, ix)); |
| 6811 | } |
| 6812 | break; |
| 6813 | |
| 6814 | case TREE_LIST: |
| 6815 | WT (t->list.purpose); |
| 6816 | WT (t->list.value); |
| 6817 | WT (t->list.common.chain); |
| 6818 | break; |
| 6819 | |
| 6820 | case TREE_VEC: |
| 6821 | for (unsigned ix = TREE_VEC_LENGTH (t); ix--;) |
| 6822 | WT (TREE_VEC_ELT (t, ix)); |
| 6823 | /* We stash NON_DEFAULT_TEMPLATE_ARGS_COUNT on TREE_CHAIN! */ |
| 6824 | gcc_checking_assert (!t->type_common.common.chain |
| 6825 | || (TREE_CODE (t->type_common.common.chain) |
| 6826 | == INTEGER_CST)); |
| 6827 | WT (t->type_common.common.chain); |
| 6828 | break; |
| 6829 | |
| 6830 | /* C++-specific nodes ... */ |
| 6831 | case BASELINK: |
| 6832 | WT (((lang_tree_node *)t)->baselink.binfo); |
| 6833 | WT (((lang_tree_node *)t)->baselink.functions); |
| 6834 | WT (((lang_tree_node *)t)->baselink.access_binfo); |
| 6835 | WT (((lang_tree_node *)t)->baselink.common.chain); |
| 6836 | break; |
| 6837 | |
| 6838 | case CONSTRAINT_INFO: |
| 6839 | WT (((lang_tree_node *)t)->constraint_info.template_reqs); |
| 6840 | WT (((lang_tree_node *)t)->constraint_info.declarator_reqs); |
| 6841 | WT (((lang_tree_node *)t)->constraint_info.associated_constr); |
| 6842 | break; |
| 6843 | |
| 6844 | case DEFERRED_NOEXCEPT: |
| 6845 | WT (((lang_tree_node *)t)->deferred_noexcept.pattern); |
| 6846 | WT (((lang_tree_node *)t)->deferred_noexcept.args); |
| 6847 | break; |
| 6848 | |
| 6849 | case LAMBDA_EXPR: |
| 6850 | WT (((lang_tree_node *)t)->lambda_expression.capture_list); |
| 6851 | WT (((lang_tree_node *)t)->lambda_expression.this_capture); |
| 6852 | WT (((lang_tree_node *)t)->lambda_expression.extra_scope); |
| 6853 | WT (((lang_tree_node *)t)->lambda_expression.regen_info); |
| 6854 | WT (((lang_tree_node *)t)->lambda_expression.extra_args); |
| 6855 | /* pending_proxies is a parse-time thing. */ |
| 6856 | gcc_assert (!((lang_tree_node *)t)->lambda_expression.pending_proxies); |
| 6857 | if (state) |
| 6858 | state->write_location |
| 6859 | (*this, ((lang_tree_node *)t)->lambda_expression.locus); |
| 6860 | if (streaming_p ()) |
| 6861 | { |
| 6862 | WU (((lang_tree_node *)t)->lambda_expression.default_capture_mode); |
| 6863 | WU (((lang_tree_node *)t)->lambda_expression.discriminator_scope); |
| 6864 | WU (((lang_tree_node *)t)->lambda_expression.discriminator_sig); |
| 6865 | } |
| 6866 | break; |
| 6867 | |
| 6868 | case OVERLOAD: |
| 6869 | WT (((lang_tree_node *)t)->overload.function); |
| 6870 | WT (t->common.chain); |
| 6871 | break; |
| 6872 | |
| 6873 | case PTRMEM_CST: |
| 6874 | WT (((lang_tree_node *)t)->ptrmem.member); |
| 6875 | break; |
| 6876 | |
| 6877 | case STATIC_ASSERT: |
| 6878 | WT (((lang_tree_node *)t)->static_assertion.condition); |
| 6879 | WT (((lang_tree_node *)t)->static_assertion.message); |
| 6880 | if (state) |
| 6881 | state->write_location |
| 6882 | (*this, ((lang_tree_node *)t)->static_assertion.location); |
| 6883 | break; |
| 6884 | |
| 6885 | case TEMPLATE_DECL: |
| 6886 | /* Streamed with the template_decl node itself. */ |
| 6887 | gcc_checking_assert |
| 6888 | (TREE_VISITED (((lang_tree_node *)t)->template_decl.arguments)); |
| 6889 | gcc_checking_assert |
| 6890 | (TREE_VISITED (((lang_tree_node *)t)->template_decl.result)); |
| 6891 | if (DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (t)) |
| 6892 | WT (DECL_CHAIN (t)); |
| 6893 | break; |
| 6894 | |
| 6895 | case TEMPLATE_INFO: |
| 6896 | { |
| 6897 | WT (((lang_tree_node *)t)->template_info.tmpl); |
| 6898 | WT (((lang_tree_node *)t)->template_info.args); |
| 6899 | WT (((lang_tree_node *)t)->template_info.partial); |
| 6900 | |
| 6901 | const auto *ac = (((lang_tree_node *)t) |
| 6902 | ->template_info.deferred_access_checks); |
| 6903 | unsigned len = vec_safe_length (v: ac); |
| 6904 | if (streaming_p ()) |
| 6905 | u (v: len); |
| 6906 | if (len) |
| 6907 | { |
| 6908 | for (unsigned ix = 0; ix != len; ix++) |
| 6909 | { |
| 6910 | const auto &m = (*ac)[ix]; |
| 6911 | WT (m.binfo); |
| 6912 | WT (m.decl); |
| 6913 | WT (m.diag_decl); |
| 6914 | if (state) |
| 6915 | state->write_location (*this, m.loc); |
| 6916 | } |
| 6917 | } |
| 6918 | } |
| 6919 | break; |
| 6920 | |
| 6921 | case TEMPLATE_PARM_INDEX: |
| 6922 | if (streaming_p ()) |
| 6923 | { |
| 6924 | WU (((lang_tree_node *)t)->tpi.index); |
| 6925 | WU (((lang_tree_node *)t)->tpi.level); |
| 6926 | WU (((lang_tree_node *)t)->tpi.orig_level); |
| 6927 | } |
| 6928 | WT (((lang_tree_node *)t)->tpi.decl); |
| 6929 | /* TEMPLATE_PARM_DESCENDANTS (AKA TREE_CHAIN) is an internal |
| 6930 | cache, do not stream. */ |
| 6931 | break; |
| 6932 | |
| 6933 | case TRAIT_EXPR: |
| 6934 | WT (((lang_tree_node *)t)->trait_expression.type1); |
| 6935 | WT (((lang_tree_node *)t)->trait_expression.type2); |
| 6936 | if (streaming_p ()) |
| 6937 | WU (((lang_tree_node *)t)->trait_expression.kind); |
| 6938 | break; |
| 6939 | |
| 6940 | case TU_LOCAL_ENTITY: |
| 6941 | WT (((lang_tree_node *)t)->tu_local_entity.name); |
| 6942 | if (state) |
| 6943 | state->write_location |
| 6944 | (*this, ((lang_tree_node *)t)->tu_local_entity.loc); |
| 6945 | break; |
| 6946 | } |
| 6947 | |
| 6948 | if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) |
| 6949 | { |
| 6950 | /* We want to stream the type of a expression-like nodes /after/ |
| 6951 | we've streamed the operands. The type often contains (bits |
| 6952 | of the) types of the operands, and with things like decltype |
| 6953 | and noexcept in play, we really want to stream the decls |
| 6954 | defining the type before we try and stream the type on its |
| 6955 | own. Otherwise we can find ourselves trying to read in a |
| 6956 | decl, when we're already partially reading in a component of |
| 6957 | its type. And that's bad. */ |
| 6958 | tree type = t->typed.type; |
| 6959 | unsigned prec = 0; |
| 6960 | |
| 6961 | switch (code) |
| 6962 | { |
| 6963 | default: |
| 6964 | break; |
| 6965 | |
| 6966 | case TEMPLATE_DECL: |
| 6967 | /* We fill in the template's type separately. */ |
| 6968 | type = NULL_TREE; |
| 6969 | break; |
| 6970 | |
| 6971 | case TYPE_DECL: |
| 6972 | if (DECL_ORIGINAL_TYPE (t) && t == TYPE_NAME (type)) |
| 6973 | /* This is a typedef. We set its type separately. */ |
| 6974 | type = NULL_TREE; |
| 6975 | break; |
| 6976 | |
| 6977 | case ENUMERAL_TYPE: |
| 6978 | if (type && !ENUM_FIXED_UNDERLYING_TYPE_P (t)) |
| 6979 | { |
| 6980 | /* Type is a restricted range integer type derived from the |
| 6981 | integer_types. Find the right one. */ |
| 6982 | prec = TYPE_PRECISION (type); |
| 6983 | tree name = DECL_NAME (TYPE_NAME (type)); |
| 6984 | |
| 6985 | for (unsigned itk = itk_none; itk--;) |
| 6986 | if (integer_types[itk] |
| 6987 | && DECL_NAME (TYPE_NAME (integer_types[itk])) == name) |
| 6988 | { |
| 6989 | type = integer_types[itk]; |
| 6990 | break; |
| 6991 | } |
| 6992 | gcc_assert (type != t->typed.type); |
| 6993 | } |
| 6994 | break; |
| 6995 | } |
| 6996 | |
| 6997 | WT (type); |
| 6998 | if (prec && streaming_p ()) |
| 6999 | WU (prec); |
| 7000 | } |
| 7001 | |
| 7002 | if (TREE_CODE (t) == CONSTRUCTOR) |
| 7003 | { |
| 7004 | unsigned len = vec_safe_length (v: t->constructor.elts); |
| 7005 | if (streaming_p ()) |
| 7006 | WU (len); |
| 7007 | if (len) |
| 7008 | for (unsigned ix = 0; ix != len; ix++) |
| 7009 | { |
| 7010 | const constructor_elt &elt = (*t->constructor.elts)[ix]; |
| 7011 | |
| 7012 | WT (elt.index); |
| 7013 | WT (elt.value); |
| 7014 | } |
| 7015 | } |
| 7016 | |
| 7017 | #undef WT |
| 7018 | #undef WU |
| 7019 | } |
| 7020 | |
| 7021 | // Streaming in a reference to a decl can cause that decl to be |
| 7022 | // TREE_USED, which is the mark_used behaviour we need most of the |
| 7023 | // time. The trees_in::unused can be incremented to inhibit this, |
| 7024 | // which is at least needed for vtables. |
| 7025 | |
| 7026 | bool |
| 7027 | trees_in::core_vals (tree t) |
| 7028 | { |
| 7029 | #define RU(X) ((X) = u ()) |
| 7030 | #define RUC(T,X) ((X) = T (u ())) |
| 7031 | #define RT(X) ((X) = tree_node ()) |
| 7032 | #define RTU(X) ((X) = tree_node (true)) |
| 7033 | tree_code code = TREE_CODE (t); |
| 7034 | |
| 7035 | /* First by tree shape. */ |
| 7036 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL)) |
| 7037 | { |
| 7038 | RT (t->decl_minimal.name); |
| 7039 | if (!DECL_TEMPLATE_PARM_P (t)) |
| 7040 | RT (t->decl_minimal.context); |
| 7041 | |
| 7042 | /* Don't zap the locus just yet, we don't record it correctly |
| 7043 | and thus lose all location information. */ |
| 7044 | t->decl_minimal.locus = state->read_location (*this); |
| 7045 | if (has_warning_spec (t)) |
| 7046 | put_warning_spec (t, u ()); |
| 7047 | } |
| 7048 | |
| 7049 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON)) |
| 7050 | { |
| 7051 | RT (t->type_common.name); |
| 7052 | RT (t->type_common.context); |
| 7053 | |
| 7054 | RT (t->type_common.main_variant); |
| 7055 | RT (t->type_common.canonical); |
| 7056 | |
| 7057 | /* type_common.next_variant is internally manipulated. */ |
| 7058 | /* type_common.pointer_to, type_common.reference_to. */ |
| 7059 | |
| 7060 | RU (t->type_common.precision); |
| 7061 | RU (t->type_common.contains_placeholder_bits); |
| 7062 | RUC (machine_mode, t->type_common.mode); |
| 7063 | RU (t->type_common.align); |
| 7064 | |
| 7065 | if (!RECORD_OR_UNION_CODE_P (code)) |
| 7066 | { |
| 7067 | RT (t->type_common.size); |
| 7068 | RT (t->type_common.size_unit); |
| 7069 | } |
| 7070 | RT (t->type_common.attributes); |
| 7071 | |
| 7072 | RT (t->type_common.common.chain); /* TYPE_STUB_DECL. */ |
| 7073 | } |
| 7074 | |
| 7075 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| 7076 | { |
| 7077 | RUC (machine_mode, t->decl_common.mode); |
| 7078 | RU (t->decl_common.off_align); |
| 7079 | RU (t->decl_common.align); |
| 7080 | |
| 7081 | if (code != TEMPLATE_DECL) |
| 7082 | { |
| 7083 | RT (t->decl_common.size); |
| 7084 | RT (t->decl_common.size_unit); |
| 7085 | } |
| 7086 | |
| 7087 | RT (t->decl_common.attributes); |
| 7088 | RT (t->decl_common.abstract_origin); |
| 7089 | } |
| 7090 | |
| 7091 | if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) |
| 7092 | { |
| 7093 | RT (t->decl_with_vis.assembler_name); |
| 7094 | RUC (symbol_visibility, t->decl_with_vis.visibility); |
| 7095 | } |
| 7096 | |
| 7097 | if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON)) |
| 7098 | { |
| 7099 | if (code == ENUMERAL_TYPE) |
| 7100 | { |
| 7101 | /* These fields get set even for opaque enums that lack a |
| 7102 | definition, so we stream them directly for each ENUMERAL_TYPE. |
| 7103 | We stream TYPE_VALUES as part of the definition. */ |
| 7104 | RT (t->type_non_common.maxval); |
| 7105 | RT (t->type_non_common.minval); |
| 7106 | } |
| 7107 | /* Records and unions hold FIELDS, VFIELD & BINFO on these |
| 7108 | things. */ |
| 7109 | else if (!RECORD_OR_UNION_CODE_P (code)) |
| 7110 | { |
| 7111 | /* This is not clobbering TYPE_CACHED_VALUES, because this |
| 7112 | is a type that doesn't have any. */ |
| 7113 | gcc_checking_assert (!TYPE_CACHED_VALUES_P (t)); |
| 7114 | RT (t->type_non_common.values); |
| 7115 | RT (t->type_non_common.maxval); |
| 7116 | RT (t->type_non_common.minval); |
| 7117 | } |
| 7118 | |
| 7119 | RT (t->type_non_common.lang_1); |
| 7120 | } |
| 7121 | |
| 7122 | if (CODE_CONTAINS_STRUCT (code, TS_EXP)) |
| 7123 | { |
| 7124 | t->exp.locus = state->read_location (*this); |
| 7125 | if (has_warning_spec (t)) |
| 7126 | put_warning_spec (t, u ()); |
| 7127 | |
| 7128 | bool vl = TREE_CODE_CLASS (code) == tcc_vl_exp; |
| 7129 | unsigned limit = (vl ? VL_EXP_OPERAND_LENGTH (t) |
| 7130 | : TREE_OPERAND_LENGTH (t)); |
| 7131 | unsigned ix = unsigned (vl); |
| 7132 | if (code == REQUIRES_EXPR) |
| 7133 | { |
| 7134 | REQUIRES_EXPR_PARMS (t) = chained_decls (); |
| 7135 | ++ix; |
| 7136 | } |
| 7137 | for (; ix != limit; ix++) |
| 7138 | RTU (TREE_OPERAND (t, ix)); |
| 7139 | } |
| 7140 | |
| 7141 | /* Then by CODE. Special cases and/or 1:1 tree shape |
| 7142 | correspondance. */ |
| 7143 | switch (code) |
| 7144 | { |
| 7145 | default: |
| 7146 | break; |
| 7147 | |
| 7148 | case ARGUMENT_PACK_SELECT: |
| 7149 | case DEFERRED_PARSE: |
| 7150 | case IDENTIFIER_NODE: |
| 7151 | case BINDING_VECTOR: |
| 7152 | case SSA_NAME: |
| 7153 | case TRANSLATION_UNIT_DECL: |
| 7154 | case USERDEF_LITERAL: |
| 7155 | return false; /* Should never meet. */ |
| 7156 | |
| 7157 | /* Constants. */ |
| 7158 | case COMPLEX_CST: |
| 7159 | RT (TREE_REALPART (t)); |
| 7160 | RT (TREE_IMAGPART (t)); |
| 7161 | break; |
| 7162 | |
| 7163 | case FIXED_CST: |
| 7164 | /* Not suported in C++. */ |
| 7165 | return false; |
| 7166 | |
| 7167 | case INTEGER_CST: |
| 7168 | { |
| 7169 | unsigned num = TREE_INT_CST_EXT_NUNITS (t); |
| 7170 | for (unsigned ix = 0; ix != num; ix++) |
| 7171 | TREE_INT_CST_ELT (t, ix) = wu (); |
| 7172 | } |
| 7173 | break; |
| 7174 | |
| 7175 | case POLY_INT_CST: |
| 7176 | for (unsigned ix = 0; ix != NUM_POLY_INT_COEFFS; ix++) |
| 7177 | RT (POLY_INT_CST_COEFF (t, ix)); |
| 7178 | break; |
| 7179 | |
| 7180 | case REAL_CST: |
| 7181 | if (const void *bytes = buf (len: sizeof (real_value))) |
| 7182 | memcpy (TREE_REAL_CST_PTR (t), src: bytes, n: sizeof (real_value)); |
| 7183 | break; |
| 7184 | |
| 7185 | case STRING_CST: |
| 7186 | /* Streamed during start. */ |
| 7187 | break; |
| 7188 | |
| 7189 | case RAW_DATA_CST: |
| 7190 | RT (RAW_DATA_OWNER (t)); |
| 7191 | gcc_assert (TREE_CODE (RAW_DATA_OWNER (t)) == STRING_CST |
| 7192 | && TREE_STRING_LENGTH (RAW_DATA_OWNER (t))); |
| 7193 | RAW_DATA_POINTER (t) = TREE_STRING_POINTER (RAW_DATA_OWNER (t)) + z (); |
| 7194 | break; |
| 7195 | |
| 7196 | case VECTOR_CST: |
| 7197 | for (unsigned ix = vector_cst_encoded_nelts (t); ix--;) |
| 7198 | RT (VECTOR_CST_ENCODED_ELT (t, ix)); |
| 7199 | break; |
| 7200 | |
| 7201 | /* Decls. */ |
| 7202 | case VAR_DECL: |
| 7203 | if (DECL_CONTEXT (t) |
| 7204 | && TREE_CODE (DECL_CONTEXT (t)) != FUNCTION_DECL) |
| 7205 | { |
| 7206 | if (DECL_HAS_VALUE_EXPR_P (t)) |
| 7207 | { |
| 7208 | tree val = tree_node (); |
| 7209 | SET_DECL_VALUE_EXPR (t, val); |
| 7210 | } |
| 7211 | break; |
| 7212 | } |
| 7213 | /* FALLTHROUGH */ |
| 7214 | |
| 7215 | case RESULT_DECL: |
| 7216 | case PARM_DECL: |
| 7217 | if (DECL_HAS_VALUE_EXPR_P (t)) |
| 7218 | { |
| 7219 | /* The DECL_VALUE hash table is a cache, thus if we're |
| 7220 | reading a duplicate (which we end up discarding), the |
| 7221 | value expr will also be cleaned up at the next gc. */ |
| 7222 | tree val = tree_node (); |
| 7223 | SET_DECL_VALUE_EXPR (t, val); |
| 7224 | } |
| 7225 | /* FALLTHROUGH */ |
| 7226 | |
| 7227 | case CONST_DECL: |
| 7228 | case IMPORTED_DECL: |
| 7229 | RT (t->decl_common.initial); |
| 7230 | break; |
| 7231 | |
| 7232 | case FIELD_DECL: |
| 7233 | RT (t->field_decl.offset); |
| 7234 | RT (t->field_decl.bit_field_type); |
| 7235 | RT (t->field_decl.qualifier); |
| 7236 | RT (t->field_decl.bit_offset); |
| 7237 | RT (t->field_decl.fcontext); |
| 7238 | RT (t->decl_common.initial); |
| 7239 | break; |
| 7240 | |
| 7241 | case LABEL_DECL: |
| 7242 | RU (t->label_decl.label_decl_uid); |
| 7243 | RU (t->label_decl.eh_landing_pad_nr); |
| 7244 | break; |
| 7245 | |
| 7246 | case FUNCTION_DECL: |
| 7247 | { |
| 7248 | unsigned bltin = u (); |
| 7249 | t->function_decl.built_in_class = built_in_class (bltin); |
| 7250 | if (bltin != NOT_BUILT_IN) |
| 7251 | { |
| 7252 | bltin = u (); |
| 7253 | DECL_UNCHECKED_FUNCTION_CODE (t) = built_in_function (bltin); |
| 7254 | } |
| 7255 | |
| 7256 | RT (t->function_decl.personality); |
| 7257 | /* These properties are not streamed, and should be reconstructed |
| 7258 | from any function attributes. */ |
| 7259 | // t->function_decl.function_specific_target); |
| 7260 | // t->function_decl.function_specific_optimization); |
| 7261 | RT (t->function_decl.vindex); |
| 7262 | |
| 7263 | if (DECL_HAS_DEPENDENT_EXPLICIT_SPEC_P (t)) |
| 7264 | { |
| 7265 | tree spec; |
| 7266 | RT (spec); |
| 7267 | store_explicit_specifier (t, spec); |
| 7268 | } |
| 7269 | } |
| 7270 | break; |
| 7271 | |
| 7272 | case USING_DECL: |
| 7273 | /* USING_DECL_DECLS */ |
| 7274 | RT (t->decl_common.initial); |
| 7275 | /* FALLTHROUGH */ |
| 7276 | |
| 7277 | case TYPE_DECL: |
| 7278 | /* USING_DECL: USING_DECL_SCOPE */ |
| 7279 | /* TYPE_DECL: DECL_ORIGINAL_TYPE */ |
| 7280 | RT (t->decl_non_common.result); |
| 7281 | break; |
| 7282 | |
| 7283 | /* Miscellaneous common nodes. */ |
| 7284 | case BLOCK: |
| 7285 | t->block.locus = state->read_location (*this); |
| 7286 | t->block.end_locus = state->read_location (*this); |
| 7287 | |
| 7288 | for (tree *chain = &t->block.vars;;) |
| 7289 | if (tree decl = tree_node ()) |
| 7290 | { |
| 7291 | /* For a deduplicated local type or enumerator, chain the |
| 7292 | duplicate decl instead of the canonical in-TU decl. Seeing |
| 7293 | a duplicate here means the containing function whose body |
| 7294 | we're streaming in is a duplicate too, so we'll end up |
| 7295 | discarding this BLOCK (and the rest of the duplicate function |
| 7296 | body) anyway. */ |
| 7297 | decl = maybe_duplicate (decl); |
| 7298 | |
| 7299 | if (!DECL_P (decl)) |
| 7300 | { |
| 7301 | set_overrun (); |
| 7302 | break; |
| 7303 | } |
| 7304 | |
| 7305 | /* If DECL_CHAIN is already set then this was a backreference to a |
| 7306 | local type or enumerator from a previous read (PR c++/114630). |
| 7307 | Let's copy the node so we can keep building the chain for ODR |
| 7308 | checking later. */ |
| 7309 | if (DECL_CHAIN (decl)) |
| 7310 | { |
| 7311 | gcc_checking_assert (TREE_CODE (decl) == TYPE_DECL |
| 7312 | && find_duplicate (DECL_CONTEXT (decl))); |
| 7313 | decl = copy_decl (decl); |
| 7314 | } |
| 7315 | |
| 7316 | *chain = decl; |
| 7317 | chain = &DECL_CHAIN (decl); |
| 7318 | } |
| 7319 | else |
| 7320 | break; |
| 7321 | |
| 7322 | /* nonlocalized_vars is middle-end. */ |
| 7323 | RT (t->block.subblocks); |
| 7324 | RT (t->block.supercontext); |
| 7325 | RT (t->block.abstract_origin); |
| 7326 | /* fragment_origin, fragment_chain are middle-end. */ |
| 7327 | RT (t->block.chain); |
| 7328 | /* nonlocalized_vars, block_num, die are middle endy/debug |
| 7329 | things. */ |
| 7330 | break; |
| 7331 | |
| 7332 | case CALL_EXPR: |
| 7333 | RUC (internal_fn, t->base.u.ifn); |
| 7334 | break; |
| 7335 | |
| 7336 | case CONSTRUCTOR: |
| 7337 | // Streamed after the node's type. |
| 7338 | break; |
| 7339 | |
| 7340 | case OMP_CLAUSE: |
| 7341 | { |
| 7342 | RU (t->omp_clause.subcode.map_kind); |
| 7343 | t->omp_clause.locus = state->read_location (*this); |
| 7344 | |
| 7345 | unsigned len = omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; |
| 7346 | for (unsigned ix = 0; ix != len; ix++) |
| 7347 | RT (t->omp_clause.ops[ix]); |
| 7348 | } |
| 7349 | break; |
| 7350 | |
| 7351 | case STATEMENT_LIST: |
| 7352 | { |
| 7353 | tree_stmt_iterator iter = tsi_start (t); |
| 7354 | for (tree stmt; RT (stmt);) |
| 7355 | { |
| 7356 | if (TREE_CODE (stmt) == DEBUG_BEGIN_STMT |
| 7357 | && !MAY_HAVE_DEBUG_MARKER_STMTS) |
| 7358 | continue; |
| 7359 | tsi_link_after (&iter, stmt, TSI_CONTINUE_LINKING); |
| 7360 | } |
| 7361 | } |
| 7362 | break; |
| 7363 | |
| 7364 | case OPTIMIZATION_NODE: |
| 7365 | case TARGET_OPTION_NODE: |
| 7366 | /* Not implemented, see trees_out::core_vals. */ |
| 7367 | gcc_unreachable (); |
| 7368 | break; |
| 7369 | |
| 7370 | case TREE_BINFO: |
| 7371 | RT (t->binfo.common.chain); |
| 7372 | RT (t->binfo.offset); |
| 7373 | RT (t->binfo.inheritance); |
| 7374 | RT (t->binfo.vptr_field); |
| 7375 | |
| 7376 | /* Do not mark the vtables as USED in the address expressions |
| 7377 | here. */ |
| 7378 | unused++; |
| 7379 | RT (t->binfo.vtable); |
| 7380 | RT (t->binfo.virtuals); |
| 7381 | RT (t->binfo.vtt_subvtt); |
| 7382 | RT (t->binfo.vtt_vptr); |
| 7383 | unused--; |
| 7384 | |
| 7385 | BINFO_BASE_ACCESSES (t) = tree_vec (); |
| 7386 | if (!get_overrun ()) |
| 7387 | { |
| 7388 | unsigned num = vec_safe_length (BINFO_BASE_ACCESSES (t)); |
| 7389 | for (unsigned ix = 0; ix != num; ix++) |
| 7390 | BINFO_BASE_APPEND (t, tree_node ()); |
| 7391 | } |
| 7392 | break; |
| 7393 | |
| 7394 | case TREE_LIST: |
| 7395 | RT (t->list.purpose); |
| 7396 | RT (t->list.value); |
| 7397 | RT (t->list.common.chain); |
| 7398 | break; |
| 7399 | |
| 7400 | case TREE_VEC: |
| 7401 | for (unsigned ix = TREE_VEC_LENGTH (t); ix--;) |
| 7402 | RT (TREE_VEC_ELT (t, ix)); |
| 7403 | RT (t->type_common.common.chain); |
| 7404 | break; |
| 7405 | |
| 7406 | /* C++-specific nodes ... */ |
| 7407 | case BASELINK: |
| 7408 | RT (((lang_tree_node *)t)->baselink.binfo); |
| 7409 | RTU (((lang_tree_node *)t)->baselink.functions); |
| 7410 | RT (((lang_tree_node *)t)->baselink.access_binfo); |
| 7411 | RT (((lang_tree_node *)t)->baselink.common.chain); |
| 7412 | break; |
| 7413 | |
| 7414 | case CONSTRAINT_INFO: |
| 7415 | RT (((lang_tree_node *)t)->constraint_info.template_reqs); |
| 7416 | RT (((lang_tree_node *)t)->constraint_info.declarator_reqs); |
| 7417 | RT (((lang_tree_node *)t)->constraint_info.associated_constr); |
| 7418 | break; |
| 7419 | |
| 7420 | case DEFERRED_NOEXCEPT: |
| 7421 | RT (((lang_tree_node *)t)->deferred_noexcept.pattern); |
| 7422 | RT (((lang_tree_node *)t)->deferred_noexcept.args); |
| 7423 | break; |
| 7424 | |
| 7425 | case LAMBDA_EXPR: |
| 7426 | RT (((lang_tree_node *)t)->lambda_expression.capture_list); |
| 7427 | RT (((lang_tree_node *)t)->lambda_expression.this_capture); |
| 7428 | RT (((lang_tree_node *)t)->lambda_expression.extra_scope); |
| 7429 | RT (((lang_tree_node *)t)->lambda_expression.regen_info); |
| 7430 | RT (((lang_tree_node *)t)->lambda_expression.extra_args); |
| 7431 | /* lambda_expression.pending_proxies is NULL */ |
| 7432 | ((lang_tree_node *)t)->lambda_expression.locus |
| 7433 | = state->read_location (*this); |
| 7434 | RUC (cp_lambda_default_capture_mode_type, |
| 7435 | ((lang_tree_node *)t)->lambda_expression.default_capture_mode); |
| 7436 | RU (((lang_tree_node *)t)->lambda_expression.discriminator_scope); |
| 7437 | RU (((lang_tree_node *)t)->lambda_expression.discriminator_sig); |
| 7438 | break; |
| 7439 | |
| 7440 | case OVERLOAD: |
| 7441 | RT (((lang_tree_node *)t)->overload.function); |
| 7442 | RT (t->common.chain); |
| 7443 | break; |
| 7444 | |
| 7445 | case PTRMEM_CST: |
| 7446 | RT (((lang_tree_node *)t)->ptrmem.member); |
| 7447 | break; |
| 7448 | |
| 7449 | case STATIC_ASSERT: |
| 7450 | RT (((lang_tree_node *)t)->static_assertion.condition); |
| 7451 | RT (((lang_tree_node *)t)->static_assertion.message); |
| 7452 | ((lang_tree_node *)t)->static_assertion.location |
| 7453 | = state->read_location (*this); |
| 7454 | break; |
| 7455 | |
| 7456 | case TEMPLATE_DECL: |
| 7457 | /* Streamed when reading the raw template decl itself. */ |
| 7458 | gcc_assert (((lang_tree_node *)t)->template_decl.arguments); |
| 7459 | gcc_assert (((lang_tree_node *)t)->template_decl.result); |
| 7460 | if (DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (t)) |
| 7461 | RT (DECL_CHAIN (t)); |
| 7462 | break; |
| 7463 | |
| 7464 | case TEMPLATE_INFO: |
| 7465 | RT (((lang_tree_node *)t)->template_info.tmpl); |
| 7466 | RT (((lang_tree_node *)t)->template_info.args); |
| 7467 | RT (((lang_tree_node *)t)->template_info.partial); |
| 7468 | if (unsigned len = u ()) |
| 7469 | { |
| 7470 | auto &ac = (((lang_tree_node *)t) |
| 7471 | ->template_info.deferred_access_checks); |
| 7472 | vec_alloc (v&: ac, nelems: len); |
| 7473 | for (unsigned ix = 0; ix != len; ix++) |
| 7474 | { |
| 7475 | deferred_access_check m; |
| 7476 | |
| 7477 | RT (m.binfo); |
| 7478 | RT (m.decl); |
| 7479 | RT (m.diag_decl); |
| 7480 | m.loc = state->read_location (*this); |
| 7481 | ac->quick_push (obj: m); |
| 7482 | } |
| 7483 | } |
| 7484 | break; |
| 7485 | |
| 7486 | case TEMPLATE_PARM_INDEX: |
| 7487 | RU (((lang_tree_node *)t)->tpi.index); |
| 7488 | RU (((lang_tree_node *)t)->tpi.level); |
| 7489 | RU (((lang_tree_node *)t)->tpi.orig_level); |
| 7490 | RT (((lang_tree_node *)t)->tpi.decl); |
| 7491 | break; |
| 7492 | |
| 7493 | case TRAIT_EXPR: |
| 7494 | RT (((lang_tree_node *)t)->trait_expression.type1); |
| 7495 | RT (((lang_tree_node *)t)->trait_expression.type2); |
| 7496 | RUC (cp_trait_kind, ((lang_tree_node *)t)->trait_expression.kind); |
| 7497 | break; |
| 7498 | |
| 7499 | case TU_LOCAL_ENTITY: |
| 7500 | RT (((lang_tree_node *)t)->tu_local_entity.name); |
| 7501 | ((lang_tree_node *)t)->tu_local_entity.loc |
| 7502 | = state->read_location (*this); |
| 7503 | } |
| 7504 | |
| 7505 | if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) |
| 7506 | { |
| 7507 | tree type = tree_node (); |
| 7508 | |
| 7509 | if (type && code == ENUMERAL_TYPE && !ENUM_FIXED_UNDERLYING_TYPE_P (t)) |
| 7510 | { |
| 7511 | unsigned precision = u (); |
| 7512 | |
| 7513 | type = build_distinct_type_copy (type); |
| 7514 | TYPE_PRECISION (type) = precision; |
| 7515 | set_min_and_max_values_for_integral_type (type, precision, |
| 7516 | TYPE_SIGN (type)); |
| 7517 | } |
| 7518 | |
| 7519 | if (code != TEMPLATE_DECL) |
| 7520 | t->typed.type = type; |
| 7521 | } |
| 7522 | |
| 7523 | if (TREE_CODE (t) == CONSTRUCTOR) |
| 7524 | if (unsigned len = u ()) |
| 7525 | { |
| 7526 | vec_alloc (v&: t->constructor.elts, nelems: len); |
| 7527 | for (unsigned ix = 0; ix != len; ix++) |
| 7528 | { |
| 7529 | constructor_elt elt; |
| 7530 | |
| 7531 | RT (elt.index); |
| 7532 | RTU (elt.value); |
| 7533 | t->constructor.elts->quick_push (obj: elt); |
| 7534 | } |
| 7535 | } |
| 7536 | |
| 7537 | #undef RT |
| 7538 | #undef RM |
| 7539 | #undef RU |
| 7540 | return !get_overrun (); |
| 7541 | } |
| 7542 | |
/* Write the selector-dependent value fields of T's DECL_LANG_SPECIFIC.
   The stream order here must exactly mirror trees_in::lang_decl_vals.  */

void
trees_out::lang_decl_vals (tree t)
{
  const struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
#define WU(X) (u (X))
#define WT(X) (tree_node (X))
  /* Module index already written.  */
  switch (lang->u.base.selector)
    {
    default:
      gcc_unreachable ();

    case lds_fn:  /* lang_decl_fn.  */
      if (streaming_p ())
	{
	  /* Overloadable operators additionally carry their operator
	     code.  */
	  if (DECL_NAME (t) && IDENTIFIER_OVL_OP_P (DECL_NAME (t)))
	    WU (lang->u.fn.ovl_op_code);
	}

      if (DECL_CLASS_SCOPE_P (t) || DECL_UNIQUE_FRIEND_P (t))
	WT (lang->u.fn.context);

      if (lang->u.fn.thunk_p)
	{
	  /* The thunked-to function.  */
	  WT (lang->u.fn.befriending_classes);
	  if (streaming_p ())
	    wi (v: lang->u.fn.u5.fixed_offset);
	}
      else if (decl_tls_wrapper_p (t))
	/* The wrapped variable.  */
	WT (lang->u.fn.befriending_classes);
      else
	WT (lang->u.fn.u5.cloned_function);

      if (FNDECL_USED_AUTO (t))
	WT (lang->u.fn.u.saved_auto_return_type);

      /* lang_decl_fn extends lang_decl_min; fall through to stream the
	 common fields.  */
      goto lds_min;

    case lds_decomp:  /* lang_decl_decomp.  */
      WT (lang->u.decomp.base);
      goto lds_min;

    case lds_min:  /* lang_decl_min.  */
    lds_min:
      WT (lang->u.min.template_info);
      {
	tree access = lang->u.min.access;

	/* DECL_ACCESS needs to be maintained by the definition of the
	   (derived) class that changes the access.  The other users
	   of DECL_ACCESS need to write it here.  */
	if (!DECL_THUNK_P (t)
	    && (DECL_CONTEXT (t) && TYPE_P (DECL_CONTEXT (t))))
	  access = NULL_TREE;

	WT (access);
      }
      /* A friend template specialisation stashes its owning class on its
	 DECL_CHAIN; we need to reconstruct this, but it needs to happen
	 after we stream the template_info so readers can know this is such
	 an entity.  */
      if (decl_specialization_friend_p (decl: t))
	WT (t->common.chain);
      break;

    case lds_ns:  /* lang_decl_ns.  */
      /* No namespace-specific value fields are streamed here.  */
      break;

    case lds_parm:  /* lang_decl_parm.  */
      if (streaming_p ())
	{
	  WU (lang->u.parm.level);
	  WU (lang->u.parm.index);
	}
      break;
    }
#undef WU
#undef WT
}
| 7624 | |
/* Read the selector-dependent value fields of T's DECL_LANG_SPECIFIC,
   in exactly the order trees_out::lang_decl_vals wrote them.  Returns
   false on stream corruption.  */

bool
trees_in::lang_decl_vals (tree t)
{
  struct lang_decl *lang = DECL_LANG_SPECIFIC (t);
#define RU(X) ((X) = u ())
#define RT(X) ((X) = tree_node ())

  /* Module index already read.  */
  switch (lang->u.base.selector)
    {
    default:
      gcc_unreachable ();

    case lds_fn:  /* lang_decl_fn.  */
      if (DECL_NAME (t) && IDENTIFIER_OVL_OP_P (DECL_NAME (t)))
	{
	  unsigned code = u ();

	  /* Check consistency.  The code must be in range and name a
	     real (non-error) overloaded operator of the right
	     assignment-ness.  */
	  if (code >= OVL_OP_MAX
	      || (ovl_op_info[IDENTIFIER_ASSIGN_OP_P (DECL_NAME (t))][code]
		  .ovl_op_code) == OVL_OP_ERROR_MARK)
	    set_overrun ();
	  else
	    lang->u.fn.ovl_op_code = code;
	}

      if (DECL_CLASS_SCOPE_P (t) || DECL_UNIQUE_FRIEND_P (t))
	RT (lang->u.fn.context);

      if (lang->u.fn.thunk_p)
	{
	  /* The thunked-to function and the thunk's fixed offset.  */
	  RT (lang->u.fn.befriending_classes);
	  lang->u.fn.u5.fixed_offset = wi ();
	}
      else if (decl_tls_wrapper_p (t))
	/* The wrapped variable.  */
	RT (lang->u.fn.befriending_classes);
      else
	RT (lang->u.fn.u5.cloned_function);

      if (FNDECL_USED_AUTO (t))
	RT (lang->u.fn.u.saved_auto_return_type);
      goto lds_min;

    case lds_decomp:  /* lang_decl_decomp.  */
      RT (lang->u.decomp.base);
      goto lds_min;

    case lds_min:  /* lang_decl_min.  */
    lds_min:
      RT (lang->u.min.template_info);
      RT (lang->u.min.access);
      /* A friend template specialisation stashed its owning class on
	 its DECL_CHAIN; see trees_out::lang_decl_vals.  */
      if (decl_specialization_friend_p (decl: t))
	RT (t->common.chain);
      break;

    case lds_ns:  /* lang_decl_ns.  */
      break;

    case lds_parm:  /* lang_decl_parm.  */
      RU (lang->u.parm.level);
      RU (lang->u.parm.index);
      break;
    }
#undef RU
#undef RT
  return !get_overrun ();
}
| 7693 | |
| 7694 | /* Most of the value contents of lang_type is streamed in |
| 7695 | define_class. */ |
| 7696 | |
| 7697 | void |
| 7698 | trees_out::lang_type_vals (tree t) |
| 7699 | { |
| 7700 | const struct lang_type *lang = TYPE_LANG_SPECIFIC (t); |
| 7701 | #define WU(X) (u (X)) |
| 7702 | #define WT(X) (tree_node (X)) |
| 7703 | if (streaming_p ()) |
| 7704 | WU (lang->align); |
| 7705 | #undef WU |
| 7706 | #undef WT |
| 7707 | } |
| 7708 | |
| 7709 | bool |
| 7710 | trees_in::lang_type_vals (tree t) |
| 7711 | { |
| 7712 | struct lang_type *lang = TYPE_LANG_SPECIFIC (t); |
| 7713 | #define RU(X) ((X) = u ()) |
| 7714 | #define RT(X) ((X) = tree_node ()) |
| 7715 | RU (lang->align); |
| 7716 | #undef RU |
| 7717 | #undef RT |
| 7718 | return !get_overrun (); |
| 7719 | } |
| 7720 | |
/* Write out the bools of T, including information about any
   LANG_SPECIFIC information.  Including allocation of any lang
   specific object.  */

void
trees_out::tree_node_bools (tree t)
{
  gcc_checking_assert (streaming_p ());

  /* We should never stream a namespace.  */
  gcc_checking_assert (TREE_CODE (t) != NAMESPACE_DECL
		       || DECL_NAMESPACE_ALIAS (t));

  bits_out bits = stream_bits ();
  core_bools (t, bits);

  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_declaration:
      {
	/* Flag whether a lang_decl is present, so the reader knows to
	   allocate one before reading its bools.  */
	bool specific = DECL_LANG_SPECIFIC (t) != NULL;
	bits.b (x: specific);
	/* For variables, the reader also needs to know up front
	   whether to allocate the larger lang_decl_decomp variant.  */
	if (specific && VAR_P (t))
	  bits.b (DECL_DECOMPOSITION_P (t));
	if (specific)
	  lang_decl_bools (t, bits);
      }
      break;

    case tcc_type:
      {
	/* Only the main variant of a type owns a lang_type; all other
	   variants share it.  */
	bool specific = (TYPE_MAIN_VARIANT (t) == t
			 && TYPE_LANG_SPECIFIC (t) != NULL);
	gcc_assert (TYPE_LANG_SPECIFIC (t)
		    == TYPE_LANG_SPECIFIC (TYPE_MAIN_VARIANT (t)));

	bits.b (x: specific);
	if (specific)
	  lang_type_bools (t, bits);
      }
      break;

    default:
      break;
    }

  bits.bflush ();
}
| 7769 | |
/* Read the bool fields of T written by trees_out::tree_node_bools,
   allocating lang-specific structures where flagged.  Returns false on
   failure or stream corruption.  */

bool
trees_in::tree_node_bools (tree t)
{
  bits_in bits = stream_bits ();
  bool ok = core_bools (t, bits);

  if (ok)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_declaration:
	if (bits.b ())
	  {
	    /* For variables a second bit selects the larger
	       lang_decl_decomp variant.  */
	    bool decomp = VAR_P (t) && bits.b ();

	    ok = maybe_add_lang_decl_raw (t, decomp_p: decomp);
	    if (ok)
	      ok = lang_decl_bools (t, bits);
	  }
	break;

      case tcc_type:
	if (bits.b ())
	  {
	    ok = maybe_add_lang_type_raw (t);
	    if (ok)
	      ok = lang_type_bools (t, bits);
	  }
	break;

      default:
	break;
      }

  bits.bflush ();
  if (!ok || get_overrun ())
    return false;

  return true;
}
| 7809 | |
| 7810 | |
| 7811 | /* Write out the lang-specific vals of node T. */ |
| 7812 | |
| 7813 | void |
| 7814 | trees_out::lang_vals (tree t) |
| 7815 | { |
| 7816 | switch (TREE_CODE_CLASS (TREE_CODE (t))) |
| 7817 | { |
| 7818 | case tcc_declaration: |
| 7819 | if (DECL_LANG_SPECIFIC (t)) |
| 7820 | lang_decl_vals (t); |
| 7821 | break; |
| 7822 | |
| 7823 | case tcc_type: |
| 7824 | if (TYPE_MAIN_VARIANT (t) == t && TYPE_LANG_SPECIFIC (t)) |
| 7825 | lang_type_vals (t); |
| 7826 | break; |
| 7827 | |
| 7828 | default: |
| 7829 | break; |
| 7830 | } |
| 7831 | } |
| 7832 | |
| 7833 | bool |
| 7834 | trees_in::lang_vals (tree t) |
| 7835 | { |
| 7836 | bool ok = true; |
| 7837 | |
| 7838 | switch (TREE_CODE_CLASS (TREE_CODE (t))) |
| 7839 | { |
| 7840 | case tcc_declaration: |
| 7841 | if (DECL_LANG_SPECIFIC (t)) |
| 7842 | ok = lang_decl_vals (t); |
| 7843 | break; |
| 7844 | |
| 7845 | case tcc_type: |
| 7846 | if (TYPE_LANG_SPECIFIC (t)) |
| 7847 | ok = lang_type_vals (t); |
| 7848 | else |
| 7849 | TYPE_LANG_SPECIFIC (t) = TYPE_LANG_SPECIFIC (TYPE_MAIN_VARIANT (t)); |
| 7850 | break; |
| 7851 | |
| 7852 | default: |
| 7853 | break; |
| 7854 | } |
| 7855 | |
| 7856 | return ok; |
| 7857 | } |
| 7858 | |
/* Write out the value fields of node T.  */

void
trees_out::tree_node_vals (tree t)
{
  /* Core fields first, then lang-specific ones; this order must match
     trees_in::tree_node_vals.  */
  core_vals (t);
  lang_vals (t);
}
| 7867 | |
| 7868 | bool |
| 7869 | trees_in::tree_node_vals (tree t) |
| 7870 | { |
| 7871 | bool ok = core_vals (t); |
| 7872 | if (ok) |
| 7873 | ok = lang_vals (t); |
| 7874 | |
| 7875 | return ok; |
| 7876 | } |
| 7877 | |
| 7878 | |
/* If T is a back reference, fixed reference or NULL, write out its
   code and return WK_none.  Otherwise return WK_value if we must write
   by value, or WK_normal otherwise.  */

walk_kind
trees_out::ref_node (tree t)
{
  if (!t)
    {
      if (streaming_p ())
	{
	  /* NULL_TREE -> tt_null.  */
	  null_count++;
	  i (v: tt_null);
	}
      return WK_none;
    }

  if (!TREE_VISITED (t))
    /* Not yet seen -- the caller must walk it normally.  */
    return WK_normal;

  /* An already-visited tree.  It must be in the map.  */
  int val = get_tag (t);

  if (val == tag_value)
    /* An entry we should walk into.  */
    return WK_value;

  const char *kind;

  if (val <= tag_backref)
    {
      /* Back reference -> -ve number  */
      if (streaming_p ())
	i (v: val);
      kind = "backref" ;
    }
  else if (val >= tag_fixed)
    {
      /* Fixed reference -> tt_fixed followed by the (rebased)
	 index.  */
      val -= tag_fixed;
      if (streaming_p ())
	i (v: tt_fixed), u (v: val);
      kind = "fixed" ;
    }

  if (streaming_p ())
    {
      back_ref_count++;
      dump (dumper::TREE)
	&& dump ("Wrote %s:%d %C:%N%S" , kind, val, TREE_CODE (t), t, t);
    }
  return WK_none;
}
| 7933 | |
| 7934 | tree |
| 7935 | trees_in::back_ref (int tag) |
| 7936 | { |
| 7937 | tree res = NULL_TREE; |
| 7938 | |
| 7939 | if (tag < 0 && unsigned (~tag) < back_refs.length ()) |
| 7940 | res = back_refs[~tag]; |
| 7941 | |
| 7942 | if (!res |
| 7943 | /* Checking TREE_CODE is a dereference, so we know this is not a |
| 7944 | wild pointer. Checking the code provides evidence we've not |
| 7945 | corrupted something. */ |
| 7946 | || TREE_CODE (res) >= MAX_TREE_CODES) |
| 7947 | set_overrun (); |
| 7948 | else |
| 7949 | dump (dumper::TREE) && dump ("Read backref:%d found %C:%N%S" , tag, |
| 7950 | TREE_CODE (res), res, res); |
| 7951 | return res; |
| 7952 | } |
| 7953 | |
| 7954 | unsigned |
| 7955 | trees_out::add_indirect_tpl_parms (tree parms) |
| 7956 | { |
| 7957 | unsigned len = 0; |
| 7958 | for (; parms; parms = TREE_CHAIN (parms), len++) |
| 7959 | { |
| 7960 | if (TREE_VISITED (parms)) |
| 7961 | break; |
| 7962 | |
| 7963 | int tag = insert (t: parms); |
| 7964 | if (streaming_p ()) |
| 7965 | dump (dumper::TREE) |
| 7966 | && dump ("Indirect:%d template's parameter %u %C:%N" , |
| 7967 | tag, len, TREE_CODE (parms), parms); |
| 7968 | } |
| 7969 | |
| 7970 | if (streaming_p ()) |
| 7971 | u (v: len); |
| 7972 | |
| 7973 | return len; |
| 7974 | } |
| 7975 | |
| 7976 | unsigned |
| 7977 | trees_in::add_indirect_tpl_parms (tree parms) |
| 7978 | { |
| 7979 | unsigned len = u (); |
| 7980 | for (unsigned ix = 0; ix != len; parms = TREE_CHAIN (parms), ix++) |
| 7981 | { |
| 7982 | int tag = insert (t: parms); |
| 7983 | dump (dumper::TREE) |
| 7984 | && dump ("Indirect:%d template's parameter %u %C:%N" , |
| 7985 | tag, ix, TREE_CODE (parms), parms); |
| 7986 | } |
| 7987 | |
| 7988 | return len; |
| 7989 | } |
| 7990 | |
/* We've just found DECL by name.  Insert nodes that come with it, but
   cannot be found by name, so we'll not accidentally walk into them.  */

void
trees_out::add_indirects (tree decl)
{
  unsigned count = 0;

  // FIXME:OPTIMIZATION We'll eventually want default fn parms of
  // templates and perhaps default template parms too.  The former can
  // be referenced from instantiations (as they are lazily
  // instantiated).  Also (deferred?) exception specifications of
  // templates.  See the note about PARM_DECLs in trees_out::decl_node.
  tree inner = decl;
  if (TREE_CODE (decl) == TEMPLATE_DECL)
    {
      /* A template's parameter list and result decl are reachable from
	 it but not findable by name.  */
      count += add_indirect_tpl_parms (DECL_TEMPLATE_PARMS (decl));

      inner = DECL_TEMPLATE_RESULT (decl);
      int tag = insert (t: inner);
      if (streaming_p ())
	dump (dumper::TREE)
	  && dump ("Indirect:%d template's result %C:%N" ,
		   tag, TREE_CODE (inner), inner);
      count++;
    }

  if (TREE_CODE (inner) == TYPE_DECL)
    {
      /* Make sure the type is in the map too.  Otherwise we get
	 different RECORD_TYPEs for the same type, and things go
	 south.  */
      tree type = TREE_TYPE (inner);
      gcc_checking_assert (DECL_ORIGINAL_TYPE (inner)
			   || TYPE_NAME (type) == inner);
      int tag = insert (t: type);
      if (streaming_p ())
	dump (dumper::TREE) && dump ("Indirect:%d decl's type %C:%N" , tag,
				     TREE_CODE (type), type);
      count++;
    }

  if (streaming_p ())
    {
      /* Stream the count so trees_in::add_indirects can verify it
	 inserted the same number of nodes.  */
      u (v: count);
      dump (dumper::TREE) && dump ("Inserted %u indirects" , count);
    }
}
| 8039 | |
| 8040 | bool |
| 8041 | trees_in::add_indirects (tree decl) |
| 8042 | { |
| 8043 | unsigned count = 0; |
| 8044 | |
| 8045 | tree inner = decl; |
| 8046 | if (TREE_CODE (inner) == TEMPLATE_DECL) |
| 8047 | { |
| 8048 | count += add_indirect_tpl_parms (DECL_TEMPLATE_PARMS (decl)); |
| 8049 | |
| 8050 | inner = DECL_TEMPLATE_RESULT (decl); |
| 8051 | int tag = insert (t: inner); |
| 8052 | dump (dumper::TREE) |
| 8053 | && dump ("Indirect:%d templates's result %C:%N" , tag, |
| 8054 | TREE_CODE (inner), inner); |
| 8055 | count++; |
| 8056 | } |
| 8057 | |
| 8058 | if (TREE_CODE (inner) == TYPE_DECL) |
| 8059 | { |
| 8060 | tree type = TREE_TYPE (inner); |
| 8061 | gcc_checking_assert (DECL_ORIGINAL_TYPE (inner) |
| 8062 | || TYPE_NAME (type) == inner); |
| 8063 | int tag = insert (t: type); |
| 8064 | dump (dumper::TREE) |
| 8065 | && dump ("Indirect:%d decl's type %C:%N" , tag, TREE_CODE (type), type); |
| 8066 | count++; |
| 8067 | } |
| 8068 | |
| 8069 | dump (dumper::TREE) && dump ("Inserted %u indirects" , count); |
| 8070 | return count == u (); |
| 8071 | } |
| 8072 | |
/* Stream a template parameter.  There are 4.5 kinds of parameter:
   a) Template - TEMPLATE_DECL->TYPE_DECL->TEMPLATE_TEMPLATE_PARM
   	TEMPLATE_TYPE_PARM_INDEX TPI
   b) Type - TYPE_DECL->TEMPLATE_TYPE_PARM TEMPLATE_TYPE_PARM_INDEX TPI
   c.1) NonTYPE - PARM_DECL DECL_INITIAL TPI We meet this first
   c.2) NonTYPE - CONST_DECL DECL_INITIAL Same TPI
   d) BoundTemplate - TYPE_DECL->BOUND_TEMPLATE_TEMPLATE_PARM
       TEMPLATE_TYPE_PARM_INDEX->TPI
       TEMPLATE_TEMPLATE_PARM_INFO->TEMPLATE_INFO

   All of these point to a TEMPLATE_PARM_INDEX, and #B also has a TEMPLATE_INFO
*/

void
trees_out::tpl_parm_value (tree parm)
{
  gcc_checking_assert (DECL_P (parm) && DECL_TEMPLATE_PARM_P (parm));

  /* Insert the parm before streaming, so self-references within its
     fields resolve as back references.  */
  int parm_tag = insert (t: parm);
  if (streaming_p ())
    {
      i (v: tt_tpl_parm);
      dump (dumper::TREE) && dump ("Writing template parm:%d %C:%N" ,
				   parm_tag, TREE_CODE (parm), parm);
      start (t: parm);
      tree_node_bools (t: parm);
    }

  /* For a template template parameter, stream (and insert) the
     underlying result decl too.  */
  tree inner = parm;
  if (TREE_CODE (inner) == TEMPLATE_DECL)
    {
      inner = DECL_TEMPLATE_RESULT (inner);
      int inner_tag = insert (t: inner);
      if (streaming_p ())
	{
	  dump (dumper::TREE) && dump ("Writing inner template parm:%d %C:%N" ,
				       inner_tag, TREE_CODE (inner), inner);
	  start (t: inner);
	  tree_node_bools (t: inner);
	}
    }

  /* A TYPE_DECL parameter additionally carries its type.  */
  tree type = NULL_TREE;
  if (TREE_CODE (inner) == TYPE_DECL)
    {
      type = TREE_TYPE (inner);
      int type_tag = insert (t: type);
      if (streaming_p ())
	{
	  dump (dumper::TREE) && dump ("Writing template parm type:%d %C:%N" ,
				       type_tag, TREE_CODE (type), type);
	  start (t: type);
	  tree_node_bools (t: type);
	}
    }

  if (inner != parm)
    {
      /* This is a template-template parameter.  */
      unsigned tpl_levels = 0;
      tpl_header (decl: parm, tpl_levels: &tpl_levels);
      tpl_parms_fini (decl: parm, tpl_levels);
    }

  /* Now the value fields of each node, in the same order the reader
     consumes them.  */
  tree_node_vals (t: parm);
  if (inner != parm)
    tree_node_vals (t: inner);
  if (type)
    {
      tree_node_vals (t: type);
      if (DECL_NAME (inner) == auto_identifier
	  || DECL_NAME (inner) == decltype_auto_identifier)
	{
	  /* Placeholder auto.  */
	  tree_node (DECL_INITIAL (inner));
	  tree_node (DECL_SIZE_UNIT (inner));
	}
    }

  if (streaming_p ())
    dump (dumper::TREE) && dump ("Wrote template parm:%d %C:%N" ,
				 parm_tag, TREE_CODE (parm), parm);
}
| 8156 | |
/* Read a template parameter streamed by trees_out::tpl_parm_value.
   Returns the parameter decl, or NULL_TREE on failure.  */

tree
trees_in::tpl_parm_value ()
{
  tree parm = start ();
  if (!parm || !tree_node_bools (t: parm))
    return NULL_TREE;

  int parm_tag = insert (t: parm);
  dump (dumper::TREE) && dump ("Reading template parm:%d %C:%N" ,
			       parm_tag, TREE_CODE (parm), parm);

  /* For a template template parameter, read its underlying result
     decl too.  */
  tree inner = parm;
  if (TREE_CODE (inner) == TEMPLATE_DECL)
    {
      inner = start ();
      if (!inner || !tree_node_bools (t: inner))
	return NULL_TREE;
      int inner_tag = insert (t: inner);
      dump (dumper::TREE) && dump ("Reading inner template parm:%d %C:%N" ,
				   inner_tag, TREE_CODE (inner), inner);
      DECL_TEMPLATE_RESULT (parm) = inner;
    }

  /* A TYPE_DECL parameter additionally carries its type.  */
  tree type = NULL_TREE;
  if (TREE_CODE (inner) == TYPE_DECL)
    {
      type = start ();
      if (!type || !tree_node_bools (t: type))
	return NULL_TREE;
      int type_tag = insert (t: type);
      dump (dumper::TREE) && dump ("Reading template parm type:%d %C:%N" ,
				   type_tag, TREE_CODE (type), type);

      /* Link the type to both the inner and outer decls.  */
      TREE_TYPE (inner) = TREE_TYPE (parm) = type;
      TYPE_NAME (type) = parm;
    }

  if (inner != parm)
    {
      /* A template template parameter.  */
      unsigned tpl_levels = 0;
      tpl_header (decl: parm, tpl_levels: &tpl_levels);
      tpl_parms_fini (decl: parm, tpl_levels);
    }

  /* Now the value fields, in the order the writer emitted them.  */
  tree_node_vals (t: parm);
  if (inner != parm)
    tree_node_vals (t: inner);
  if (type)
    {
      tree_node_vals (t: type);
      if (DECL_NAME (inner) == auto_identifier
	  || DECL_NAME (inner) == decltype_auto_identifier)
	{
	  /* Placeholder auto.  */
	  DECL_INITIAL (inner) = tree_node ();
	  DECL_SIZE_UNIT (inner) = tree_node ();
	}
      if (TYPE_CANONICAL (type))
	{
	  /* Re-canonicalize the parameter's type in this TU.  */
	  gcc_checking_assert (TYPE_CANONICAL (type) == type);
	  TYPE_CANONICAL (type) = canonical_type_parameter (type);
	}
    }

  dump (dumper::TREE) && dump ("Read template parm:%d %C:%N" ,
			       parm_tag, TREE_CODE (parm), parm);

  return parm;
}
| 8227 | |
| 8228 | void |
| 8229 | trees_out::install_entity (tree decl, depset *dep) |
| 8230 | { |
| 8231 | gcc_checking_assert (streaming_p ()); |
| 8232 | |
| 8233 | /* Write the entity index, so we can insert it as soon as we |
| 8234 | know this is new. */ |
| 8235 | u (v: dep ? dep->cluster + 1 : 0); |
| 8236 | if (CHECKING_P && dep) |
| 8237 | { |
| 8238 | /* Add it to the entity map, such that we can tell it is |
| 8239 | part of us. */ |
| 8240 | bool existed; |
| 8241 | unsigned *slot = &entity_map->get_or_insert |
| 8242 | (DECL_UID (decl), existed: &existed); |
| 8243 | if (existed) |
| 8244 | /* If it existed, it should match. */ |
| 8245 | gcc_checking_assert (decl == (*entity_ary)[*slot]); |
| 8246 | *slot = ~dep->cluster; |
| 8247 | } |
| 8248 | } |
| 8249 | |
/* Read the entity index streamed by trees_out::install_entity and, if
   non-zero, register DECL in the entity array and entity map.  Returns
   true iff DECL is an entity of this module.  */

bool
trees_in::install_entity (tree decl)
{
  unsigned entity_index = u ();
  if (!entity_index)
    /* Zero means DECL is not an entity.  */
    return false;

  if (entity_index > state->entity_num)
    {
      set_overrun ();
      return false;
    }

  /* Insert the real decl into the entity ary.  */
  unsigned ident = state->entity_lwm + entity_index - 1;
  (*entity_ary)[ident] = decl;

  /* And into the entity map, if it's not already there.  */
  tree not_tmpl = STRIP_TEMPLATE (decl);
  if (!DECL_LANG_SPECIFIC (not_tmpl)
      || !DECL_MODULE_ENTITY_P (not_tmpl))
    {
      /* We don't want to use retrofit_lang_decl directly so that we aren't
	 affected by the language state when we load in.  */
      if (!DECL_LANG_SPECIFIC (not_tmpl))
	{
	  maybe_add_lang_decl_raw (not_tmpl, decomp_p: false);
	  SET_DECL_LANGUAGE (not_tmpl, lang_cplusplus);
	}
      DECL_MODULE_ENTITY_P (not_tmpl) = true;

      /* Insert into the entity hash (it cannot already be there).  */
      bool existed;
      unsigned &slot = entity_map->get_or_insert (DECL_UID (decl), existed: &existed);
      gcc_checking_assert (!existed);
      slot = ident;
    }
  else
    {
      unsigned *slot = entity_map->get (DECL_UID (decl));

      /* The entity must be in the entity map already.  However, DECL may
	 be the DECL_TEMPLATE_RESULT of an existing partial specialisation
	 if we matched it while streaming another instantiation; in this
	 case we already registered that TEMPLATE_DECL.  */
      if (!slot)
	{
	  tree type = TREE_TYPE (decl);
	  gcc_checking_assert (TREE_CODE (decl) == TYPE_DECL
			       && CLASS_TYPE_P (type)
			       && CLASSTYPE_TEMPLATE_SPECIALIZATION (type));
	  slot = entity_map->get (DECL_UID (CLASSTYPE_TI_TEMPLATE (type)));
	}
      gcc_checking_assert (slot);

      if (state->is_partition ())
	{
	  /* The decl is already in the entity map, but we see it again now
	     from a partition: we want to overwrite if the original decl
	     wasn't also from a (possibly different) partition.  Otherwise,
	     for things like template instantiations, make_dependency might
	     not realise that this is also provided from a partition and
	     should be considered part of this module (and thus always
	     emitted into the primary interface's CMI).  */
	  module_state *imp = import_entity_module (index: *slot);
	  if (!imp->is_partition ())
	    *slot = ident;
	}
    }

  return true;
}
| 8322 | |
| 8323 | static bool has_definition (tree decl); |
| 8324 | |
| 8325 | /* DECL is a decl node that must be written by value. DEP is the |
| 8326 | decl's depset. */ |
| 8327 | |
void
trees_out::decl_value (tree decl, depset *dep)
{
  /* We should not be writing clones or template parms.  */
  gcc_checking_assert (DECL_P (decl)
		       && !DECL_CLONED_FUNCTION_P (decl)
		       && !DECL_TEMPLATE_PARM_P (decl));

  /* We should never be writing non-typedef ptrmemfuncs by value.  */
  gcc_checking_assert (TREE_CODE (decl) != TYPE_DECL
		       || DECL_ORIGINAL_TYPE (decl)
		       || !TYPE_PTRMEMFUNC_P (TREE_TYPE (decl)));

  /* There's no need to walk any of the contents of a known TU-local entity,
     since importers should never see any of it regardless.  But make sure we
     at least note its location so importers can use it for diagnostics.  */
  if (dep && dep->is_tu_local ())
    {
      gcc_checking_assert (is_initial_scan ());
      insert (t: decl, walk: WK_value);
      state->note_location (DECL_SOURCE_LOCATION (decl));
      return;
    }

  /* Classify how importers will merge this decl with any existing
     declaration (named lookup, specialization table, unique, etc.).  */
  merge_kind mk = get_merge_kind (decl, maybe_dep: dep);

  if (CHECKING_P)
    {
      /* Never start in the middle of a template.  */
      int use_tpl = -1;
      if (tree ti = node_template_info (decl, use&: use_tpl))
	gcc_checking_assert (TREE_CODE (TI_TEMPLATE (ti)) == OVERLOAD
			     || TREE_CODE (TI_TEMPLATE (ti)) == FIELD_DECL
			     || (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti))
				 != decl));
    }

  if (streaming_p ())
    {
      /* A new node -> tt_decl.  */
      decl_val_count++;
      i (v: tt_decl);
      u (v: mk);
      start (t: decl);

      if (mk != MK_unique)
	{
	  bits_out bits = stream_bits ();
	  if (!(mk & MK_template_mask) && !state->is_header ())
	    {
	      /* Tell the importer whether this is a global module entity,
		 or a module entity.  */
	      tree o = get_originating_module_decl (decl);
	      bool is_attached = false;

	      tree not_tmpl = STRIP_TEMPLATE (o);
	      if (DECL_LANG_SPECIFIC (not_tmpl)
		  && DECL_MODULE_ATTACH_P (not_tmpl))
		is_attached = true;

	      bits.b (x: is_attached);
	    }
	  bits.b (x: dep && dep->has_defn ());
	}
      tree_node_bools (t: decl);
    }

  /* Allocate DECL's back-reference tag; later references to DECL within
     this section stream as references to this tag.  */
  int tag = insert (t: decl, walk: WK_value);
  if (streaming_p ())
    dump (dumper::TREE)
      && dump ("Writing %s:%d %C:%N%S", merge_kind_name[mk], tag,
	       TREE_CODE (decl), decl, decl);

  tree inner = decl;
  int inner_tag = 0;
  if (TREE_CODE (decl) == TEMPLATE_DECL)
    {
      inner = DECL_TEMPLATE_RESULT (decl);
      inner_tag = insert (t: inner, walk: WK_value);

      /* On stream-in we assume that a template and its result will
	 have the same type.  */
      gcc_checking_assert (TREE_TYPE (decl) == TREE_TYPE (inner));

      if (streaming_p ())
	{
	  int code = TREE_CODE (inner);
	  u (v: code);
	  start (t: inner, code_streamed: true);
	  tree_node_bools (t: inner);
	  dump (dumper::TREE)
	    && dump ("Writing %s:%d %C:%N%S", merge_kind_name[mk], inner_tag,
		     TREE_CODE (inner), inner, inner);
	}
    }

  tree type = NULL_TREE;
  int type_tag = 0;
  tree stub_decl = NULL_TREE;
  int stub_tag = 0;
  if (TREE_CODE (inner) == TYPE_DECL)
    {
      /* Only stream the type by value here when INNER is the primary
	 name of its main variant (i.e. not a plain typedef).  */
      type = TREE_TYPE (inner);
      bool has_type = (type == TYPE_MAIN_VARIANT (type)
		       && TYPE_NAME (type) == inner);

      if (streaming_p ())
	u (v: has_type ? TREE_CODE (type) : 0);

      if (has_type)
	{
	  type_tag = insert (t: type, walk: WK_value);
	  if (streaming_p ())
	    {
	      start (t: type, code_streamed: true);
	      tree_node_bools (t: type);
	      dump (dumper::TREE)
		&& dump ("Writing type:%d %C:%N", type_tag,
			 TREE_CODE (type), type);
	    }

	  stub_decl = TYPE_STUB_DECL (type);
	  bool has_stub = inner != stub_decl;
	  if (streaming_p ())
	    u (v: has_stub ? TREE_CODE (stub_decl) : 0);
	  if (has_stub)
	    {
	      stub_tag = insert (t: stub_decl);
	      if (streaming_p ())
		{
		  start (t: stub_decl, code_streamed: true);
		  tree_node_bools (t: stub_decl);
		  dump (dumper::TREE)
		    && dump ("Writing stub_decl:%d %C:%N", stub_tag,
			     TREE_CODE (stub_decl), stub_decl);
		}
	    }
	  else
	    stub_decl = NULL_TREE;
	}
      else
	/* Regular typedef.  */
	type = NULL_TREE;
    }

  /* Stream the container, we want it correctly canonicalized before
     we start emitting keys for this decl.  */
  tree container = decl_container (decl);
  unsigned tpl_levels = 0;

  /* Also tell the importer whether this is a temploid friend attached
     to a different module (which has implications for merging), so that
     importers can reconstruct this information on stream-in.  */
  if (TREE_CODE (inner) == FUNCTION_DECL || TREE_CODE (inner) == TYPE_DECL)
    {
      tree* temploid_friend_slot = imported_temploid_friends->get (k: decl);
      gcc_checking_assert (!temploid_friend_slot || *temploid_friend_slot);
      tree_node (temploid_friend_slot ? *temploid_friend_slot : NULL_TREE);
    }

  {
    auto wmk = make_temp_override (var&: dep_hash->writing_merge_key, overrider: true);
    if (decl != inner)
      tpl_header (decl, tpl_levels: &tpl_levels);
    if (TREE_CODE (inner) == FUNCTION_DECL)
      fn_parms_init (inner);

    /* Now write out the merging information, and then really
       install the tag values.  */
    key_mergeable (tag, mk, decl, inner, container, maybe_dep: dep);

    if (streaming_p ())
      dump (dumper::MERGE)
	&& dump ("Wrote:%d's %s merge key %C:%N", tag,
		 merge_kind_name[mk], TREE_CODE (decl), decl);
  }

  if (TREE_CODE (inner) == FUNCTION_DECL)
    fn_parms_fini (inner);

  if (!is_key_order ())
    tree_node_vals (t: decl);

  if (inner_tag)
    {
      if (!is_key_order ())
	tree_node_vals (t: inner);
      tpl_parms_fini (decl, tpl_levels);
    }

  if (type && !is_key_order ())
    {
      tree_node_vals (t: type);
      if (stub_decl)
	tree_node_vals (t: stub_decl);
    }

  if (!is_key_order ())
    {
      /* For specializations, stream the template and argument nodes
	 so stream-in can register the specialization.  */
      if (mk & MK_template_mask
	  || mk == MK_partial
	  || mk == MK_friend_spec)
	{
	  if (mk != MK_partial)
	    {
	      // FIXME: We should make use of the merge-key by
	      // exposing it outside of key_mergeable.  But this gets
	      // the job done.
	      auto *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);

	      if (streaming_p ())
		u (v: get_mergeable_specialization_flags (is_decl: mk & MK_tmpl_decl_mask,
							tmpl: entry->tmpl, spec: decl));
	      tree_node (entry->tmpl);
	      tree_node (entry->args);
	    }
	  else
	    {
	      tree ti = get_template_info (inner);
	      tree_node (TI_TEMPLATE (ti));
	      tree_node (TI_ARGS (ti));
	    }
	}
      tree_node (get_constraints (decl));
    }

  if (streaming_p ())
    {
      /* Do not stray outside this section.  */
      gcc_checking_assert (!dep || dep->section == dep_hash->section);

      /* Write the entity index, so we can insert it as soon as we
	 know this is new.  */
      install_entity (decl, dep);
    }

  if (DECL_LANG_SPECIFIC (inner)
      && DECL_MODULE_KEYED_DECLS_P (inner)
      && streaming_p ())
    {
      /* Stream the keyed entities.  There may be keyed entities that we
	 choose not to stream, such as a lambda in a non-inline variable's
	 initializer, so don't build dependencies for them here; any deps
	 we need should be acquired during write_definition (possibly
	 indirectly).  */
      auto *attach_vec = keyed_table->get (k: inner);
      unsigned num = attach_vec->length ();
      u (v: num);
      for (unsigned ix = 0; ix != num; ix++)
	{
	  tree attached = (*attach_vec)[ix];
	  if (attached)
	    {
	      /* Stream a NULL placeholder for keyed entities we have no
		 dependency on; the reader tolerates such holes.  */
	      tree ti = TYPE_TEMPLATE_INFO (TREE_TYPE (attached));
	      if (!dep_hash->find_dependency (entity: attached)
		  && !(ti && dep_hash->find_dependency (TI_TEMPLATE (ti))))
		attached = NULL_TREE;
	    }

	  tree_node (attached);
	  dump (dumper::MERGE)
	    && dump ("Written %d[%u] attached decl %N", tag, ix, attached);
	}
    }

  /* For a typedef (a TYPE_DECL whose type was not streamed by value
     above), stream flags describing how to recreate the typedef'd
     type on stream-in.  */
  bool is_typedef = false;
  if (!type && TREE_CODE (inner) == TYPE_DECL)
    {
      tree t = TREE_TYPE (inner);
      unsigned tdef_flags = 0;
      if (DECL_ORIGINAL_TYPE (inner)
	  && TYPE_NAME (TREE_TYPE (inner)) == inner)
	{
	  tdef_flags |= 1;
	  if (TYPE_STRUCTURAL_EQUALITY_P (t)
	      && TYPE_DEPENDENT_P_VALID (t)
	      && TYPE_DEPENDENT_P (t))
	    tdef_flags |= 2;
	}
      if (streaming_p ())
	u (v: tdef_flags);

      if (tdef_flags & 1)
	{
	  /* A typedef type.  */
	  int type_tag = insert (t);
	  if (streaming_p ())
	    dump (dumper::TREE)
	      && dump ("Cloned:%d %s %C:%N", type_tag,
		       tdef_flags & 2 ? "depalias" : "typedef",
		       TREE_CODE (t), t);

	  is_typedef = true;
	}
    }

  if (streaming_p () && DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
    {
      /* Stream enough about a maybe-in-charge cdtor that the importer
	 can rebuild its clones (see the reader's build_cdtor_clones).  */
      bool cloned_p
	= (DECL_CHAIN (decl) && DECL_CLONED_FUNCTION_P (DECL_CHAIN (decl)));
      bool needs_vtt_parm_p
	= (cloned_p && CLASSTYPE_VBASECLASSES (DECL_CONTEXT (decl)));
      bool omit_inherited_parms_p
	= (cloned_p && DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (decl)
	   && base_ctor_omit_inherited_parms (decl));
      unsigned flags = (int (cloned_p) << 0
			| int (needs_vtt_parm_p) << 1
			| int (omit_inherited_parms_p) << 2);
      u (v: flags);
      dump (dumper::TREE) && dump ("CDTOR %N is %scloned",
				   decl, cloned_p ? "" : "not ");
    }

  if (streaming_p () && VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    u (v: decl_tls_model (decl));

  if (streaming_p ())
    dump (dumper::TREE) && dump ("Written decl:%d %C:%N", tag,
				 TREE_CODE (decl), decl);

  if (NAMESPACE_SCOPE_P (inner))
    gcc_checking_assert (!dep == (VAR_OR_FUNCTION_DECL_P (inner)
				  && DECL_LOCAL_DECL_P (inner)));
  else if ((TREE_CODE (inner) == TYPE_DECL
	    && !is_typedef
	    && TYPE_NAME (TREE_TYPE (inner)) == inner)
	   || TREE_CODE (inner) == FUNCTION_DECL)
    {
      /* With no depset to record the definition separately, write it
	 inline here if there is one.  */
      bool write_defn = !dep && has_definition (decl);
      if (streaming_p ())
	u (v: write_defn);
      if (write_defn)
	write_definition (decl);
    }
}
| 8663 | |
/* Read in a declaration written by trees_out::decl_value.  Returns the
   resulting decl: either the newly created node, the existing decl it
   was merged (deduped) with, or NULL_TREE on stream corruption.  */

tree
trees_in::decl_value ()
{
  int tag = 0;
  bool is_attached = false;
  bool has_defn = false;
  unsigned mk_u = u ();
  if (mk_u >= MK_hwm || !merge_kind_name[mk_u])
    {
      set_overrun ();
      return NULL_TREE;
    }

  unsigned saved_unused = unused;
  unused = 0;

  merge_kind mk = merge_kind (mk_u);

  tree decl = start ();
  if (decl)
    {
      /* Mirror the bits trees_out::decl_value wrote for non-unique
	 merge kinds.  */
      if (mk != MK_unique)
	{
	  bits_in bits = stream_bits ();
	  if (!(mk & MK_template_mask) && !state->is_header ())
	    is_attached = bits.b ();

	  has_defn = bits.b ();
	}

      if (!tree_node_bools (t: decl))
	decl = NULL_TREE;
    }

  /* Insert into map.  */
  tag = insert (t: decl);
  if (decl)
    dump (dumper::TREE)
      && dump ("Reading:%d %C", tag, TREE_CODE (decl));

  tree inner = decl;
  int inner_tag = 0;
  if (decl && TREE_CODE (decl) == TEMPLATE_DECL)
    {
      int code = u ();
      inner = start (code);
      if (inner && tree_node_bools (t: inner))
	DECL_TEMPLATE_RESULT (decl) = inner;
      else
	decl = NULL_TREE;

      inner_tag = insert (t: inner);
      if (decl)
	dump (dumper::TREE)
	  && dump ("Reading:%d %C", inner_tag, TREE_CODE (inner));
    }

  tree type = NULL_TREE;
  int type_tag = 0;
  tree stub_decl = NULL_TREE;
  int stub_tag = 0;
  if (decl && TREE_CODE (inner) == TYPE_DECL)
    {
      /* A non-zero code means the writer streamed the type (and
	 possibly a stub decl) by value.  */
      if (unsigned type_code = u ())
	{
	  type = start (code: type_code);
	  if (type && tree_node_bools (t: type))
	    {
	      TREE_TYPE (inner) = type;
	      TYPE_NAME (type) = inner;
	    }
	  else
	    decl = NULL_TREE;

	  type_tag = insert (t: type);
	  if (decl)
	    dump (dumper::TREE)
	      && dump ("Reading type:%d %C", type_tag, TREE_CODE (type));

	  if (unsigned stub_code = u ())
	    {
	      stub_decl = start (code: stub_code);
	      if (stub_decl && tree_node_bools (t: stub_decl))
		{
		  TREE_TYPE (stub_decl) = type;
		  TYPE_STUB_DECL (type) = stub_decl;
		}
	      else
		decl = NULL_TREE;

	      stub_tag = insert (t: stub_decl);
	      if (decl)
		dump (dumper::TREE)
		  && dump ("Reading stub_decl:%d %C", stub_tag,
			   TREE_CODE (stub_decl));
	    }
	}
    }

  if (!decl)
    {
      /* On failure, null out every back reference we installed above
	 so later reads cannot resolve to a half-built node.  */
    bail:
      if (inner_tag != 0)
	back_refs[~inner_tag] = NULL_TREE;
      if (type_tag != 0)
	back_refs[~type_tag] = NULL_TREE;
      if (stub_tag != 0)
	back_refs[~stub_tag] = NULL_TREE;
      if (tag != 0)
	back_refs[~tag] = NULL_TREE;
      set_overrun ();
      /* Bail.  */
      unused = saved_unused;
      return NULL_TREE;
    }

  /* Read the container, to ensure it's already been streamed in.  */
  tree container = decl_container ();
  unsigned tpl_levels = 0;

  /* If this is an imported temploid friend, get the owning decl its
     attachment is determined by (or NULL_TREE otherwise).  */
  tree temploid_friend = NULL_TREE;
  if (TREE_CODE (inner) == FUNCTION_DECL || TREE_CODE (inner) == TYPE_DECL)
    temploid_friend = tree_node ();

  /* Figure out if this decl is already known about.  */
  int parm_tag = 0;

  if (decl != inner)
    if (!tpl_header (decl, tpl_levels: &tpl_levels))
      goto bail;
  if (TREE_CODE (inner) == FUNCTION_DECL)
    parm_tag = fn_parms_init (inner);

  /* Locate any existing declaration this one should merge with, using
     the merge key the writer streamed.  */
  tree existing = key_mergeable (tag, mk, decl, inner, type, container,
				 is_attached, is_imported_temploid_friend: temploid_friend);
  tree existing_inner = existing;
  if (existing)
    {
      if (existing == error_mark_node)
	goto bail;

      if (TREE_CODE (STRIP_TEMPLATE (existing)) == TYPE_DECL)
	{
	  tree etype = TREE_TYPE (existing);
	  if (TYPE_LANG_SPECIFIC (etype)
	      && COMPLETE_TYPE_P (etype)
	      && !CLASSTYPE_MEMBER_VEC (etype))
	    /* Give it a member vec, we're likely gonna be looking
	       inside it.  */
	    set_class_bindings (etype, extra: -1);
	}

      /* Install the existing decl into the back ref array.  */
      register_duplicate (decl, existing);
      back_refs[~tag] = existing;
      if (inner_tag != 0)
	{
	  existing_inner = DECL_TEMPLATE_RESULT (existing);
	  back_refs[~inner_tag] = existing_inner;
	}

      if (type_tag != 0)
	{
	  tree existing_type = TREE_TYPE (existing);
	  back_refs[~type_tag] = existing_type;
	  if (stub_tag != 0)
	    back_refs[~stub_tag] = TYPE_STUB_DECL (existing_type);
	}
    }

  if (parm_tag)
    fn_parms_fini (tag: parm_tag, fn: inner, existing: existing_inner, has_defn);

  if (!tree_node_vals (t: decl))
    goto bail;

  if (inner_tag)
    {
      gcc_checking_assert (DECL_TEMPLATE_RESULT (decl) == inner);

      if (!tree_node_vals (t: inner))
	goto bail;

      if (!tpl_parms_fini (decl, tpl_levels))
	goto bail;
    }

  if (type && (!tree_node_vals (t: type)
	       || (stub_decl && !tree_node_vals (t: stub_decl))))
    goto bail;

  spec_entry spec;
  unsigned spec_flags = 0;
  if (mk & MK_template_mask
      || mk == MK_partial
      || mk == MK_friend_spec)
    {
      if (mk == MK_partial)
	spec_flags = 2;
      else
	spec_flags = u ();

      spec.tmpl = tree_node ();
      spec.args = tree_node ();
    }
  /* Hold constraints on the spec field, for a short while.  */
  spec.spec = tree_node ();

  dump (dumper::TREE) && dump ("Read:%d %C:%N", tag, TREE_CODE (decl), decl);

  /* If we merged above, back_refs[~tag] now names the existing decl;
     otherwise it is still DECL itself.  */
  existing = back_refs[~tag];
  bool installed = install_entity (decl: existing);
  bool is_new = existing == decl;

  if (DECL_LANG_SPECIFIC (inner)
      && DECL_MODULE_KEYED_DECLS_P (inner))
    {
      /* Read and maybe install the attached entities.  */
      bool existed;
      auto &set = keyed_table->get_or_insert (STRIP_TEMPLATE (existing),
					      existed: &existed);
      unsigned num = u ();
      if (is_new == existed)
	set_overrun ();
      if (is_new)
	set.reserve (nelems: num);
      for (unsigned ix = 0; !get_overrun () && ix != num; ix++)
	{
	  tree attached = tree_node ();
	  dump (dumper::MERGE)
	    && dump ("Read %d[%u] %s attached decl %N", tag, ix,
		     is_new ? "new" : "matched", attached);
	  if (is_new)
	    set.quick_push (obj: attached);
	  else if (set[ix] != attached)
	    {
	      if (!set[ix] || !attached)
		/* One import left a hole for a lambda dep we chose not
		   to stream, but another import chose to stream that lambda.
		   Let's not error here: hopefully we'll complain later in
		   is_matching_decl about whatever caused us to make a
		   different decision.  */
		;
	      else
		set_overrun ();
	    }
	}
    }

  /* Regular typedefs will have a NULL TREE_TYPE at this point.  */
  unsigned tdef_flags = 0;
  bool is_typedef = false;
  if (!type && TREE_CODE (inner) == TYPE_DECL)
    {
      tdef_flags = u ();
      if (tdef_flags & 1)
	is_typedef = true;
    }

  if (is_new)
    {
      /* A newly discovered node.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
	/* Mark this identifier as naming a virtual function --
	   lookup_overrides relies on this optimization.  */
	IDENTIFIER_VIRTUAL_P (DECL_NAME (decl)) = true;

      if (installed)
	{
	  /* Mark the entity as imported.  */
	  retrofit_lang_decl (inner);
	  DECL_MODULE_IMPORT_P (inner) = true;
	}

      if (temploid_friend)
	imported_temploid_friends->put (k: decl, v: temploid_friend);

      if (spec.spec)
	set_constraints (decl, spec.spec);

      if (TREE_CODE (decl) == INTEGER_CST && !TREE_OVERFLOW (decl))
	{
	  decl = cache_integer_cst (decl, might_duplicate: true);
	  back_refs[~tag] = decl;
	}

      if (is_typedef)
	{
	  /* Frob it to be ready for cloning.  */
	  TREE_TYPE (inner) = DECL_ORIGINAL_TYPE (inner);
	  DECL_ORIGINAL_TYPE (inner) = NULL_TREE;
	  if (TREE_CODE (TREE_TYPE (inner)) != TU_LOCAL_ENTITY)
	    {
	      set_underlying_type (inner);
	      if (tdef_flags & 2)
		{
		  /* Match instantiate_alias_template's handling.  */
		  tree type = TREE_TYPE (inner);
		  TYPE_DEPENDENT_P (type) = true;
		  TYPE_DEPENDENT_P_VALID (type) = true;
		  SET_TYPE_STRUCTURAL_EQUALITY (type);
		}
	    }
	}

      if (inner_tag)
	/* Set the TEMPLATE_DECL's type.  */
	TREE_TYPE (decl) = TREE_TYPE (inner);

      /* Redetermine whether we need to import or export this declaration
	 for this TU.  But for extern templates we know we must import:
	 they'll be defined in a different TU.
	 FIXME: How do dllexport and dllimport interact across a module?
	 See also https://github.com/itanium-cxx-abi/cxx-abi/issues/170.
	 May have to revisit?  */
      if (type
	  && CLASS_TYPE_P (type)
	  && TYPE_LANG_SPECIFIC (type)
	  && !(CLASSTYPE_EXPLICIT_INSTANTIATION (type)
	       && CLASSTYPE_INTERFACE_KNOWN (type)
	       && CLASSTYPE_INTERFACE_ONLY (type)))
	{
	  CLASSTYPE_INTERFACE_ONLY (type) = false;
	  CLASSTYPE_INTERFACE_UNKNOWN (type) = true;
	}

      /* Add to specialization tables now that constraints etc are
	 added.  */
      if (mk == MK_partial)
	{
	  bool is_type = TREE_CODE (inner) == TYPE_DECL;
	  spec.spec = is_type ? type : inner;
	  add_mergeable_specialization (is_decl: !is_type, &spec, outer: decl, spec_flags);
	}
      else if (mk & MK_template_mask)
	{
	  bool is_type = !(mk & MK_tmpl_decl_mask);
	  spec.spec = is_type ? type : mk & MK_tmpl_tmpl_mask ? inner : decl;
	  add_mergeable_specialization (is_decl: !is_type, &spec, outer: decl, spec_flags);
	}

      if (NAMESPACE_SCOPE_P (decl)
	  && (mk == MK_named || mk == MK_unique
	      || mk == MK_enum || mk == MK_friend_spec)
	  && !(VAR_OR_FUNCTION_DECL_P (decl) && DECL_LOCAL_DECL_P (decl)))
	add_module_namespace_decl (CP_DECL_CONTEXT (decl), decl);

      if (DECL_ARTIFICIAL (decl)
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && !DECL_TEMPLATE_INFO (decl)
	  && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
	  && TYPE_SIZE (DECL_CONTEXT (decl))
	  && !DECL_THUNK_P (decl))
	/* A new implicit member function, when the class is
	   complete.  This means the importee declared it, and
	   we must now add it to the class.  Note that implicit
	   member fns of template instantiations do not themselves
	   look like templates.  */
	if (!install_implicit_member (decl: inner))
	  set_overrun ();

      /* When importing a TLS wrapper from a header unit, we haven't
	 actually emitted its definition yet.  Remember it so we can
	 do this later.  */
      if (state->is_header ()
	  && decl_tls_wrapper_p (decl))
	note_vague_linkage_fn (decl);

      /* Setup aliases for the declaration.  */
      if (tree alias = lookup_attribute (attr_name: "alias", DECL_ATTRIBUTES (decl)))
	{
	  alias = TREE_VALUE (TREE_VALUE (alias));
	  alias = get_identifier (TREE_STRING_POINTER (alias));
	  assemble_alias (decl, alias);
	}
    }
  else
    {
      /* DECL is the to-be-discarded decl.  Its internal pointers will
	 be to the EXISTING's structure.  Frob it to point to its
	 own other structures, so loading its definition will alter
	 it, and not the existing decl.  */
      dump (dumper::MERGE) && dump ("Deduping %N", existing);

      if (inner_tag)
	DECL_TEMPLATE_RESULT (decl) = inner;

      if (type)
	{
	  /* Point at the to-be-discarded type & decl.  */
	  TYPE_NAME (type) = inner;
	  TREE_TYPE (inner) = type;

	  TYPE_STUB_DECL (type) = stub_decl ? stub_decl : inner;
	  if (stub_decl)
	    TREE_TYPE (stub_decl) = type;

	  tree etype = TREE_TYPE (existing);

	  /* Handle separate declarations with different attributes.  */
	  tree &dattr = TYPE_ATTRIBUTES (type);
	  tree &eattr = TYPE_ATTRIBUTES (etype);
	  check_abi_tags (existing, decl, eattr, dattr);
	  // TODO: handle other conflicting type attributes
	  eattr = merge_attributes (eattr, dattr);

	  /* When merging a partial specialisation, the existing decl may have
	     had its TYPE_CANONICAL adjusted.  If so we should use structural
	     equality to ensure is_matching_decl doesn't get confused.  */
	  if ((spec_flags & 2)
	      && TYPE_CANONICAL (type) != TYPE_CANONICAL (etype))
	    SET_TYPE_STRUCTURAL_EQUALITY (type);
	}

      if (inner_tag)
	/* Set the TEMPLATE_DECL's type.  */
	TREE_TYPE (decl) = TREE_TYPE (inner);

      if (!is_matching_decl (existing, decl, is_typedef))
	unmatched_duplicate (existing);

      if (TREE_CODE (inner) == FUNCTION_DECL)
	{
	  /* Reparent the incoming PARM_DECLs onto the existing
	     function, which is the one we keep.  */
	  tree e_inner = STRIP_TEMPLATE (existing);
	  for (auto parm = DECL_ARGUMENTS (inner);
	       parm; parm = DECL_CHAIN (parm))
	    DECL_CONTEXT (parm) = e_inner;
	}

      /* And our result is the existing node.  */
      decl = existing;
    }

  if (mk == MK_friend_spec)
    {
      tree e = match_mergeable_specialization (is_decl: true, &spec);
      if (!e)
	{
	  spec.spec = inner;
	  add_mergeable_specialization (is_decl: true, &spec, outer: decl, spec_flags);
	}
      else if (e != existing)
	set_overrun ();
    }

  if (is_typedef)
    {
      /* Insert the type into the array now.  */
      tag = insert (TREE_TYPE (decl));
      dump (dumper::TREE)
	&& dump ("Cloned:%d typedef %C:%N",
		 tag, TREE_CODE (TREE_TYPE (decl)), TREE_TYPE (decl));
    }

  unused = saved_unused;

  if (DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
    {
      unsigned flags = u ();

      if (is_new)
	{
	  bool cloned_p = flags & 1;
	  dump (dumper::TREE) && dump ("CDTOR %N is %scloned",
				       decl, cloned_p ? "" : "not ");
	  if (cloned_p)
	    {
	      /* Update the member vec, if there is one (we're in a different
		 cluster to the class defn) and this isn't a primary template
		 specialization (as in tsubst_function_decl).  */
	      bool up = (CLASSTYPE_MEMBER_VEC (DECL_CONTEXT (decl))
			 && !primary_template_specialization_p (decl));
	      build_cdtor_clones (decl, flags & 2, flags & 4, up);
	    }
	}
    }

  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    {
      enum tls_model model = tls_model (u ());
      if (is_new)
	set_decl_tls_model (decl, model);
    }

  if (!NAMESPACE_SCOPE_P (inner)
      && ((TREE_CODE (inner) == TYPE_DECL
	   && !is_typedef
	   && TYPE_NAME (TREE_TYPE (inner)) == inner)
	  || TREE_CODE (inner) == FUNCTION_DECL)
      && u ())
    read_definition (decl);

  return decl;
}
| 9160 | |
| 9161 | /* DECL is an unnameable member of CTX. Return a suitable identifying |
| 9162 | index. */ |
| 9163 | |
| 9164 | static unsigned |
| 9165 | get_field_ident (tree ctx, tree decl) |
| 9166 | { |
| 9167 | gcc_checking_assert (TREE_CODE (decl) == USING_DECL |
| 9168 | || !DECL_NAME (decl) |
| 9169 | || IDENTIFIER_ANON_P (DECL_NAME (decl))); |
| 9170 | |
| 9171 | unsigned ix = 0; |
| 9172 | for (tree fields = TYPE_FIELDS (ctx); |
| 9173 | fields; fields = DECL_CHAIN (fields)) |
| 9174 | { |
| 9175 | if (fields == decl) |
| 9176 | return ix; |
| 9177 | |
| 9178 | if (DECL_CONTEXT (fields) == ctx |
| 9179 | && (TREE_CODE (fields) == USING_DECL |
| 9180 | || (TREE_CODE (fields) == FIELD_DECL |
| 9181 | && (!DECL_NAME (fields) |
| 9182 | || IDENTIFIER_ANON_P (DECL_NAME (fields)))))) |
| 9183 | /* Count this field. */ |
| 9184 | ix++; |
| 9185 | } |
| 9186 | gcc_unreachable (); |
| 9187 | } |
| 9188 | |
| 9189 | static tree |
| 9190 | lookup_field_ident (tree ctx, unsigned ix) |
| 9191 | { |
| 9192 | for (tree fields = TYPE_FIELDS (ctx); |
| 9193 | fields; fields = DECL_CHAIN (fields)) |
| 9194 | if (DECL_CONTEXT (fields) == ctx |
| 9195 | && (TREE_CODE (fields) == USING_DECL |
| 9196 | || (TREE_CODE (fields) == FIELD_DECL |
| 9197 | && (!DECL_NAME (fields) |
| 9198 | || IDENTIFIER_ANON_P (DECL_NAME (fields)))))) |
| 9199 | if (!ix--) |
| 9200 | return fields; |
| 9201 | |
| 9202 | return NULL_TREE; |
| 9203 | } |
| 9204 | |
| 9205 | /* Reference DECL. REF indicates the walk kind we are performing. |
| 9206 | Return true if we should write this decl by value. */ |
| 9207 | |
bool
trees_out::decl_node (tree decl, walk_kind ref)
{
  gcc_checking_assert (DECL_P (decl) && !DECL_TEMPLATE_PARM_P (decl)
		       && DECL_CONTEXT (decl));

  if (ref == WK_value)
    {
      /* The caller has already decided this decl is written by value;
	 fetch whatever dependency the dep walk recorded and stream it.  */
      depset *dep = dep_hash->find_dependency (entity: decl);
      decl_value (decl, dep);
      return false;
    }

  /* Dispatch on the decl's code: several kinds have bespoke reference
     encodings (tt_parm, tt_enum_decl, tt_data_member, ...); anything
     not handled here falls through to the entity-table path below.  */
  switch (TREE_CODE (decl))
    {
    default:
      break;

    case FUNCTION_DECL:
      gcc_checking_assert (!DECL_LOCAL_DECL_P (decl));
      break;

    case RESULT_DECL:
      /* Unlike PARM_DECLs, RESULT_DECLs are only generated and
	 referenced when we're inside the function itself.  */
      return true;

    case PARM_DECL:
      {
	/* Parms are referenced relative to their function context,
	   which streaming should have placed in the map.  */
	if (streaming_p ())
	  i (v: tt_parm);
	tree_node (DECL_CONTEXT (decl));

	/* That must have put this in the map.  */
	walk_kind ref = ref_node (t: decl);
	if (ref != WK_none)
	  // FIXME:OPTIMIZATION We can wander into bits of the
	  // template this was instantiated from, for instance
	  // deferred noexcept and default parms, or references
	  // to parms from earlier forward-decls (PR c++/119608).
	  //
	  // Currently we'll end up cloning those bits of tree.
	  // It would be nice to reference those specific nodes.
	  // I think putting those things in the map when we
	  // reference their template by name.
	  //
	  // See the note in add_indirects.
	  return true;

	if (streaming_p ())
	  dump (dumper::TREE)
	    && dump ("Wrote %s reference %N" ,
		     TREE_CODE (decl) == PARM_DECL ? "parameter" : "result" ,
		     decl);
      }
      return false;

    case IMPORTED_DECL:
      /* This describes a USING_DECL to the ME's (middle end's) debug
	 machinery.  It originates from the fortran FE, and has nothing
	 to do with C++ modules.  */
      return true;

    case LABEL_DECL:
      /* Labels are function-local; always written by value.  */
      return true;

    case CONST_DECL:
      {
	/* An enumerator: referenced by its enclosing ENUMERAL_TYPE and
	   name.  If I end up cloning enum decls, implementing C++20
	   using E::v, this will need tweaking.  */
	if (streaming_p ())
	  i (v: tt_enum_decl);
	tree ctx = DECL_CONTEXT (decl);
	gcc_checking_assert (TREE_CODE (ctx) == ENUMERAL_TYPE);
	tree_node (ctx);
	tree_node (DECL_NAME (decl));

	int tag = insert (t: decl);
	if (streaming_p ())
	  dump (dumper::TREE)
	    && dump ("Wrote enum decl:%d %C:%N" , tag, TREE_CODE (decl), decl);
	return false;
      }
      break;

    case USING_DECL:
      if (TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
	break;
      /* FALLTHROUGH */

    case FIELD_DECL:
      {
	/* A class member: referenced by context plus either name or,
	   for unnameable members, a field index.  */
	if (streaming_p ())
	  i (v: tt_data_member);

	tree ctx = DECL_CONTEXT (decl);
	tree_node (ctx);

	tree name = NULL_TREE;

	if (TREE_CODE (decl) == USING_DECL)
	  ;
	else
	  {
	    name = DECL_NAME (decl);
	    if (name && IDENTIFIER_ANON_P (name))
	      name = NULL_TREE;
	  }

	tree_node (name);
	if (!name && streaming_p ())
	  {
	    /* No usable name: identify by position among the
	       unnameable members (see get_field_ident).  */
	    unsigned ix = get_field_ident (ctx, decl);
	    u (v: ix);
	  }

	int tag = insert (t: decl);
	if (streaming_p ())
	  dump (dumper::TREE)
	    && dump ("Wrote member:%d %C:%N" , tag, TREE_CODE (decl), decl);
	return false;
      }
      break;

    case VAR_DECL:
      gcc_checking_assert (!DECL_LOCAL_DECL_P (decl));
      if (DECL_VTABLE_OR_VTT_P (decl))
	{
	  /* VTT or VTABLE, they are all on the vtables list.  Identify
	     the one we want by its position on that chain.  */
	  tree ctx = CP_DECL_CONTEXT (decl);
	  tree vtable = CLASSTYPE_VTABLES (ctx);
	  for (unsigned ix = 0; ; vtable = DECL_CHAIN (vtable), ix++)
	    if (vtable == decl)
	      {
		gcc_checking_assert (DECL_VIRTUAL_P (decl));
		if (streaming_p ())
		  {
		    u (v: tt_vtable);
		    u (v: ix);
		    dump (dumper::TREE)
		      && dump ("Writing vtable %N[%u]" , ctx, ix);
		  }
		tree_node (ctx);
		return false;
	      }
	  gcc_unreachable ();
	}

      if (DECL_TINFO_P (decl))
	{
	tinfo:
	  /* A typeinfo, tt_tinfo_typedef or tt_tinfo_var.  */
	  bool is_var = VAR_P (decl);
	  tree type = TREE_TYPE (decl);
	  unsigned ix = get_pseudo_tinfo_index (type);
	  if (streaming_p ())
	    {
	      i (v: is_var ? tt_tinfo_var : tt_tinfo_typedef);
	      u (v: ix);
	    }

	  if (is_var)
	    {
	      /* We also need the type it is for and mangled name, so
		 the reader doesn't need to complete the type (which
		 would break section ordering).  The type it is for is
		 stashed on the name's TREE_TYPE.  */
	      tree name = DECL_NAME (decl);
	      tree_node (name);
	      type = TREE_TYPE (name);
	      tree_node (type);
	    }

	  int tag = insert (t: decl);
	  if (streaming_p ())
	    dump (dumper::TREE)
	      && dump ("Wrote tinfo_%s:%d %u %N" , is_var ? "var" : "type" ,
		       tag, ix, type);

	  if (!is_var)
	    {
	      /* For a tinfo typedef the pseudo type itself also becomes
		 back-referenceable.  */
	      tag = insert (t: type);
	      if (streaming_p ())
		dump (dumper::TREE)
		  && dump ("Wrote tinfo_type:%d %u %N" , tag, ix, type);
	    }
	  return false;
	}

      if (DECL_NTTP_OBJECT_P (decl))
	{
	  /* A NTTP parm object.  */
	  if (streaming_p ())
	    i (v: tt_nttp_var);
	  tree_node (tparm_object_argument (decl));
	  tree_node (DECL_NAME (decl));
	  int tag = insert (t: decl);
	  if (streaming_p ())
	    dump (dumper::TREE)
	      && dump ("Wrote nttp object:%d %N" , tag, DECL_NAME (decl));
	  return false;
	}

      break;

    case TYPE_DECL:
      if (DECL_TINFO_P (decl))
	goto tinfo;
      break;
    }

  if (DECL_THUNK_P (decl))
    {
      /* Thunks are similar to binfos -- write the thunked-to decl and
	 then thunk-specific key info.  */
      if (streaming_p ())
	{
	  i (v: tt_thunk);
	  i (THUNK_FIXED_OFFSET (decl));
	}

      /* Chase through any chain of thunks to the ultimate target.  */
      tree target = decl;
      while (DECL_THUNK_P (target))
	target = THUNK_TARGET (target);
      tree_node (target);
      tree_node (THUNK_VIRTUAL_OFFSET (decl));
      int tag = insert (t: decl);
      if (streaming_p ())
	dump (dumper::TREE)
	  && dump ("Wrote:%d thunk %N to %N" , tag, DECL_NAME (decl), target);
      return false;
    }

  if (DECL_CLONED_FUNCTION_P (decl))
    {
      /* A cdtor clone: stream the cloned function plus its name (and
	 vindex for virtual clones) to identify which clone this is.  */
      tree target = get_clone_target (decl);
      if (streaming_p ())
	i (v: tt_clone_ref);

      tree_node (target);
      tree_node (DECL_NAME (decl));
      if (DECL_VIRTUAL_P (decl))
	tree_node (DECL_VINDEX (decl));
      int tag = insert (t: decl);
      if (streaming_p ())
	dump (dumper::TREE)
	  && dump ("Wrote:%d clone %N of %N" , tag, DECL_NAME (decl), target);
      return false;
    }

  /* Everything left should be a thing that is in the entity table.
     Mostly things that can be defined outside of their (original
     declaration) context.  */
  gcc_checking_assert (TREE_CODE (decl) == TEMPLATE_DECL
		       || VAR_P (decl)
		       || TREE_CODE (decl) == FUNCTION_DECL
		       || TREE_CODE (decl) == TYPE_DECL
		       || TREE_CODE (decl) == USING_DECL
		       || TREE_CODE (decl) == CONCEPT_DECL
		       || TREE_CODE (decl) == NAMESPACE_DECL);

  int use_tpl = -1;
  tree ti = node_template_info (decl, use&: use_tpl);
  tree tpl = NULL_TREE;

  /* If this is the TEMPLATE_DECL_RESULT of a TEMPLATE_DECL, get the
     TEMPLATE_DECL.  Note TI_TEMPLATE is not a TEMPLATE_DECL for
     (some) friends, so we need to check that.  */
  // FIXME: Should local friend template specializations be by value?
  // They don't get idents so we'll never know they're imported, but I
  // think we can only reach them from the TU that defines the
  // befriending class?
  if (ti && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL
      && DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == decl)
    {
      tpl = TI_TEMPLATE (ti);
    partial_template:
      if (streaming_p ())
	{
	  i (v: tt_template);
	  dump (dumper::TREE)
	    && dump ("Writing implicit template %C:%N%S" ,
		     TREE_CODE (tpl), tpl, tpl);
	}
      tree_node (tpl);

      /* Streaming TPL caused us to visit DECL and maybe its type,
	 if it wasn't TU-local.  */
      if (CHECKING_P && !has_tu_local_dep (tpl))
	{
	  gcc_checking_assert (TREE_VISITED (decl));
	  if (DECL_IMPLICIT_TYPEDEF_P (decl))
	    gcc_checking_assert (TREE_VISITED (TREE_TYPE (decl)));
	}
      return false;
    }

  tree ctx = CP_DECL_CONTEXT (decl);
  depset *dep = NULL;
  if (streaming_p ())
    dep = dep_hash->find_dependency (entity: decl);
  else if (TREE_CODE (ctx) != FUNCTION_DECL
	   || TREE_CODE (decl) == TEMPLATE_DECL
	   || DECL_IMPLICIT_TYPEDEF_P (decl)
	   || (DECL_LANG_SPECIFIC (decl)
	       && DECL_MODULE_IMPORT_P (decl)))
    {
      /* Dependency walk: record a dependency for anything that lives
	 in the entity table.  */
      auto kind = (TREE_CODE (decl) == NAMESPACE_DECL
		   && !DECL_NAMESPACE_ALIAS (decl)
		   ? depset::EK_NAMESPACE : depset::EK_DECL);
      dep = dep_hash->add_dependency (decl, kind);
    }

  if (!dep || dep->is_tu_local ())
    {
      /* Some internal entity of context.  Do by value.  */
      decl_value (decl, dep);
      return false;
    }

  if (dep->get_entity_kind () == depset::EK_REDIRECT)
    {
      /* The DECL_TEMPLATE_RESULT of a partial specialization.
	 Write the partial specialization's template.  */
      depset *redirect = dep->deps[0];
      gcc_checking_assert (redirect->get_entity_kind () == depset::EK_PARTIAL);
      tpl = redirect->get_entity ();
      goto partial_template;
    }

  if (streaming_p ())
    {
      /* Locate the entity.  */
      unsigned index = dep->cluster;
      unsigned import = 0;

      if (dep->is_import ())
	import = dep->section;
      else if (CHECKING_P)
	/* It should be what we put there.  */
	gcc_checking_assert (index == ~import_entity_index (decl));

#if CHECKING_P
      gcc_assert (!import || importedness >= 0);
#endif
      i (v: tt_entity);
      u (v: import);
      u (v: index);
    }

  int tag = insert (t: decl);
  if (streaming_p () && dump (dumper::TREE))
    {
      char const *kind = "import" ;
      module_state *from = this_module ();
      if (dep->is_import ())
	/* Rediscover the unremapped index.  */
	from = import_entity_module (index: import_entity_index (decl));
      else
	{
	  tree o = get_originating_module_decl (decl);
	  o = STRIP_TEMPLATE (o);
	  kind = (DECL_LANG_SPECIFIC (o) && DECL_MODULE_PURVIEW_P (o)
		  ? "purview" : "GMF" );
	}
      dump ("Wrote %s:%d %C:%N@%M" , kind,
	    tag, TREE_CODE (decl), decl, from);
    }

  add_indirects (decl);

  return false;
}
| 9581 | |
/* Write a reference to TYPE, either as a variant of its root type, via
   its naming TYPE_DECL, or as a derived type reconstructible from its
   components.  */

void
trees_out::type_node (tree type)
{
  gcc_assert (TYPE_P (type));

  /* The root is the type named by TYPE_NAME (if any), else the main
     variant; anything else is streamed as a variant of that root.  */
  tree root = (TYPE_NAME (type)
	       ? TREE_TYPE (TYPE_NAME (type)) : TYPE_MAIN_VARIANT (type));
  gcc_checking_assert (root);

  if (type != root)
    {
      if (streaming_p ())
	i (v: tt_variant_type);
      tree_node (root);

      int flags = -1;

      if (TREE_CODE (type) == FUNCTION_TYPE
	  || TREE_CODE (type) == METHOD_TYPE)
	{
	  int quals = type_memfn_quals (type);
	  int rquals = type_memfn_rqual (type);
	  tree raises = TYPE_RAISES_EXCEPTIONS (type);
	  bool late = TYPE_HAS_LATE_RETURN_TYPE (type);

	  /* Pack ref-qual, late-return and member-fn cv-quals into
	     FLAGS, but only if they differ from the root's.  */
	  if (raises != TYPE_RAISES_EXCEPTIONS (root)
	      || rquals != type_memfn_rqual (root)
	      || quals != type_memfn_quals (root)
	      || late != TYPE_HAS_LATE_RETURN_TYPE (root))
	    flags = rquals | (int (late) << 2) | (quals << 3);
	}
      else
	{
	  if (TYPE_USER_ALIGN (type))
	    flags = TYPE_ALIGN_RAW (type);
	}

      if (streaming_p ())
	i (v: flags);

      if (flags < 0)
	;
      else if (TREE_CODE (type) == FUNCTION_TYPE
	       || TREE_CODE (type) == METHOD_TYPE)
	{
	  tree raises = TYPE_RAISES_EXCEPTIONS (type);
	  /* error_mark_node signals "same as the root's".  */
	  if (raises == TYPE_RAISES_EXCEPTIONS (root))
	    raises = error_mark_node;
	  tree_node (raises);
	}

      /* build_type_attribute_variant creates a new TYPE_MAIN_VARIANT, so
	 variants should all have the same set of attributes.  */
      gcc_checking_assert (TYPE_ATTRIBUTES (type)
			   == TYPE_ATTRIBUTES (TYPE_MAIN_VARIANT (type)));

      if (streaming_p ())
	{
	  /* Qualifiers.  -1 means "same as the root's".  */
	  int rquals = cp_type_quals (root);
	  int quals = cp_type_quals (type);
	  if (quals == rquals)
	    quals = -1;
	  i (v: quals);
	}

      if (ref_node (t: type) != WK_none)
	{
	  /* Not yet in the map: make it back-referenceable.  */
	  int tag = insert (t: type);
	  if (streaming_p ())
	    {
	      i (v: 0);
	      dump (dumper::TREE)
		&& dump ("Wrote:%d variant type %C" , tag, TREE_CODE (type));
	    }
	}
      return;
    }

  if (tree name = TYPE_NAME (type))
    if ((TREE_CODE (name) == TYPE_DECL && DECL_ORIGINAL_TYPE (name))
	|| DECL_TEMPLATE_PARM_P (name)
	|| TREE_CODE (type) == RECORD_TYPE
	|| TREE_CODE (type) == UNION_TYPE
	|| TREE_CODE (type) == ENUMERAL_TYPE)
      {
	/* A typedef, template parm, or tagged type: stream it via its
	   naming decl.  */
	gcc_checking_assert (DECL_P (name));

	/* We can meet template parms that we didn't meet in the
	   tpl_parms walk, because we're referring to a derived type
	   that was previously constructed from equivalent template
	   parms.  */
	if (streaming_p ())
	  {
	    i (v: tt_typedef_type);
	    dump (dumper::TREE)
	      && dump ("Writing %stypedef %C:%N" ,
		       DECL_IMPLICIT_TYPEDEF_P (name) ? "implicit " : "" ,
		       TREE_CODE (name), name);
	  }
	tree_node (name);
	if (streaming_p ())
	  dump (dumper::TREE) && dump ("Wrote typedef %C:%N%S" ,
				       TREE_CODE (name), name, name);

	/* We'll have either visited this type or have newly discovered
	   that it's TU-local; either way we won't need to visit it again.  */
	gcc_checking_assert (TREE_VISITED (type) || has_tu_local_dep (name));
	return;
      }

  if (TYPE_PTRMEMFUNC_P (type))
    {
      /* This is a distinct type node, masquerading as a structure.  */
      tree fn_type = TYPE_PTRMEMFUNC_FN_TYPE (type);
      if (streaming_p ())
	i (v: tt_ptrmem_type);
      tree_node (fn_type);
      int tag = insert (t: type);
      if (streaming_p ())
	dump (dumper::TREE) && dump ("Written:%d ptrmem type" , tag);
      return;
    }

  if (streaming_p ())
    {
      /* A derived type, reconstructible from TREE_TYPE plus the
	 code-specific data streamed below.  */
      u (v: tt_derived_type);
      u (TREE_CODE (type));
    }

  tree_node (TREE_TYPE (type));
  switch (TREE_CODE (type))
    {
    default:
      /* We should never meet a type here that is indescribable in
	 terms of other types.  */
      gcc_unreachable ();

    case ARRAY_TYPE:
      tree_node (TYPE_DOMAIN (type));
      if (streaming_p ())
	/* Dependent arrays are constructed with TYPE_DEPENDENT_P
	   already set.  */
	u (TYPE_DEPENDENT_P (type));
      break;

    case COMPLEX_TYPE:
      /* No additional data.  */
      break;

    case BOOLEAN_TYPE:
      /* A non-standard boolean type.  */
      if (streaming_p ())
	u (TYPE_PRECISION (type));
      break;

    case INTEGER_TYPE:
      if (TREE_TYPE (type))
	{
	  /* A range type (representing an array domain).  */
	  tree_node (TYPE_MIN_VALUE (type));
	  tree_node (TYPE_MAX_VALUE (type));
	}
      else
	{
	  /* A new integral type (representing a bitfield).  */
	  if (streaming_p ())
	    {
	      unsigned prec = TYPE_PRECISION (type);
	      bool unsigned_p = TYPE_UNSIGNED (type);

	      /* Precision and signedness packed into one word.  */
	      u (v: (prec << 1) | unsigned_p);
	    }
	}
      break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      {
	gcc_checking_assert (type_memfn_rqual (type) == REF_QUAL_NONE);

	tree arg_types = TYPE_ARG_TYPES (type);
	if (TREE_CODE (type) == METHOD_TYPE)
	  {
	    /* The class the 'this' parm points at, then the rest.  */
	    tree_node (TREE_TYPE (TREE_VALUE (arg_types)));
	    arg_types = TREE_CHAIN (arg_types);
	  }
	tree_node (arg_types);
      }
      break;

    case OFFSET_TYPE:
      tree_node (TYPE_OFFSET_BASETYPE (type));
      break;

    case POINTER_TYPE:
      /* No additional data.  */
      break;

    case REFERENCE_TYPE:
      if (streaming_p ())
	u (TYPE_REF_IS_RVALUE (type));
      break;

    case DECLTYPE_TYPE:
    case TYPEOF_TYPE:
    case DEPENDENT_OPERATOR_TYPE:
      tree_node (TYPE_VALUES_RAW (type));
      if (TREE_CODE (type) == DECLTYPE_TYPE)
	/* We stash a whole bunch of things into decltype's
	   flags.  */
	if (streaming_p ())
	  tree_node_bools (t: type);
      break;

    case TRAIT_TYPE:
      tree_node (TRAIT_TYPE_KIND_RAW (type));
      tree_node (TRAIT_TYPE_TYPE1 (type));
      tree_node (TRAIT_TYPE_TYPE2 (type));
      break;

    case TYPE_ARGUMENT_PACK:
      /* No additional data.  */
      break;

    case TYPE_PACK_EXPANSION:
      if (streaming_p ())
	u (PACK_EXPANSION_LOCAL_P (type));
      tree_node (PACK_EXPANSION_PARAMETER_PACKS (type));
      tree_node (PACK_EXPANSION_EXTRA_ARGS (type));
      break;

    case PACK_INDEX_TYPE:
      tree_node (PACK_INDEX_PACK (type));
      tree_node (PACK_INDEX_INDEX (type));
      break;

    case TYPENAME_TYPE:
      {
	tree_node (TYPE_CONTEXT (type));
	tree_node (DECL_NAME (TYPE_NAME (type)));
	tree_node (TYPENAME_TYPE_FULLNAME (type));
	if (streaming_p ())
	  u (v: get_typename_tag (t: type));
      }
      break;

    case UNBOUND_CLASS_TEMPLATE:
      {
	tree decl = TYPE_NAME (type);
	tree_node (DECL_CONTEXT (decl));
	tree_node (DECL_NAME (decl));
	tree_node (DECL_TEMPLATE_PARMS (decl));
      }
      break;

    case VECTOR_TYPE:
      if (streaming_p ())
	{
	  /* Stream every poly-int coefficient of the subpart count.  */
	  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (node: type);
	  for (unsigned ix = 0; ix != NUM_POLY_INT_COEFFS; ix++)
	    wu (v: nunits.coeffs[ix]);
	}
      break;

    case META_TYPE:
      /* No additional data.  */
      break;

    case SPLICE_SCOPE:
      if (streaming_p ())
	u (SPLICE_SCOPE_TYPE_P (type));
      tree_node (SPLICE_SCOPE_EXPR (type));
      break;
    }

  tree_node (TYPE_ATTRIBUTES (type));

  /* We may have met the type during emitting the above.  */
  if (ref_node (t: type) != WK_none)
    {
      int tag = insert (t: type);
      if (streaming_p ())
	{
	  i (v: 0);
	  dump (dumper::TREE)
	    && dump ("Wrote:%d derived type %C" , tag, TREE_CODE (type));
	}
    }

  return;
}
| 9874 | |
/* T is (mostly*) a non-mergeable node that must be written by value.
   The mergeable case is a BINFO, which is as-if a DECL.  */
| 9877 | |
void
trees_out::tree_value (tree t)
{
  /* We should never be writing a type by value.  tree_type should
     have streamed it, or we're going via its TYPE_DECL.  */
  gcc_checking_assert (!TYPE_P (t));

  if (DECL_P (t))
    /* No template, type, var or function, except anonymous
       non-context vars and types.  */
    gcc_checking_assert ((TREE_CODE (t) != TEMPLATE_DECL
			  && (TREE_CODE (t) != TYPE_DECL
			      || (DECL_ARTIFICIAL (t) && !DECL_CONTEXT (t)))
			  && (TREE_CODE (t) != VAR_DECL
			      || ((!DECL_NAME (t)
				   || IDENTIFIER_INTERNAL_P (DECL_NAME (t)))
				  && !DECL_CONTEXT (t)))
			  && TREE_CODE (t) != FUNCTION_DECL));

  /* During the dependency walk, note any ADL-visible entities this
     expression may drag in.  */
  if (is_initial_scan () && EXPR_P (t))
    dep_hash->add_dependent_adl_entities (expr: t);

  if (streaming_p ())
    {
      /* A new node -> tt_node.  */
      tree_val_count++;
      i (v: tt_node);
      start (t);
      tree_node_bools (t);
    }

  if (TREE_CODE (t) == TREE_BINFO)
    /* Binfos are decl-like and need merging information.  */
    binfo_mergeable (binfo: t);

  /* Insert before streaming values, so self-references resolve.  */
  int tag = insert (t, walk: WK_value);
  if (streaming_p ())
    dump (dumper::TREE)
      && dump ("Writing tree:%d %C:%N" , tag, TREE_CODE (t), t);

  int type_tag = 0;
  tree type = NULL_TREE;
  if (TREE_CODE (t) == TYPE_DECL)
    {
      type = TREE_TYPE (t);

      /* We only support a limited set of features for uncontexted types;
	 these are typically types created in the language-independent
	 parts of the frontend (such as ubsan).  */
      gcc_checking_assert (RECORD_OR_UNION_TYPE_P (type)
			   && TYPE_MAIN_VARIANT (type) == type
			   && TYPE_NAME (type) == t
			   && TYPE_STUB_DECL (type) == t
			   && !TYPE_VFIELD (type)
			   && !TYPE_BINFO (type)
			   && !CLASS_TYPE_P (type));

      if (streaming_p ())
	{
	  start (t: type);
	  tree_node_bools (t: type);
	}

      type_tag = insert (t: type, walk: WK_value);
      if (streaming_p ())
	dump (dumper::TREE)
	  && dump ("Writing type: %d %C:%N" , type_tag,
		   TREE_CODE (type), type);
    }

  tree_node_vals (t);

  if (type)
    {
      /* The uncontexted type's own values, size, and fields, so the
	 reader can reconstruct it (mirrored in trees_in::tree_value).  */
      tree_node_vals (t: type);
      tree_node (TYPE_SIZE (type));
      tree_node (TYPE_SIZE_UNIT (type));
      chained_decls (TYPE_FIELDS (type));
      if (streaming_p ())
	dump (dumper::TREE)
	  && dump ("Written type:%d %C:%N" , type_tag, TREE_CODE (type), type);
    }

  /* For uncontexted VAR_DECLs we need to stream the definition so that
     importers can recreate their value.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      gcc_checking_assert (!DECL_NONTRIVIALLY_INITIALIZED_P (t));
      tree_node (DECL_INITIAL (t));
    }

  if (streaming_p ())
    dump (dumper::TREE) && dump ("Written tree:%d %C:%N" , tag, TREE_CODE (t), t);
}
| 9972 | |
/* Read a tree node that was written by value (tt_node); the mirror of
   trees_out::tree_value.  Returns the new node, an already-known
   duplicate when merging finds one, or NULL_TREE on stream corruption
   (after marking overrun).  */

tree
trees_in::tree_value ()
{
  tree t = start ();
  if (!t || !tree_node_bools (t))
    return NULL_TREE;

  tree existing = t;
  if (TREE_CODE (t) == TREE_BINFO)
    {
      tree type;
      unsigned ix = binfo_mergeable (&type);
      if (TYPE_BINFO (type))
	{
	  /* We already have a definition, this must be a duplicate.  */
	  dump (dumper::MERGE)
	    && dump ("Deduping binfo %N[%u]" , type, ix);
	  existing = TYPE_BINFO (type);
	  /* Step along the existing binfo chain to the IXth entry.  */
	  while (existing && ix--)
	    existing = TREE_CHAIN (existing);
	  if (existing)
	    register_duplicate (decl: t, existing);
	  else
	    /* Error, mismatch -- diagnose in read_class_def's
	       checking.  */
	    existing = t;
	}
    }

  /* Insert into map.  */
  int tag = insert (t: existing);
  dump (dumper::TREE)
    && dump ("Reading tree:%d %C" , tag, TREE_CODE (t));

  int type_tag = 0;
  tree type = NULL_TREE;
  if (TREE_CODE (t) == TYPE_DECL)
    {
      /* An uncontexted TYPE_DECL carries its type in-line; read that
	 too.  */
      type = start ();
      if (!type || !tree_node_bools (t: type))
	t = NULL_TREE;

      type_tag = insert (t: type);
      if (t)
	dump (dumper::TREE)
	  && dump ("Reading type:%d %C" , type_tag, TREE_CODE (type));
    }

  if (!t)
    {
    bail:
      /* Failed mid-read: null out the back references we installed
	 optimistically and mark the stream overrun.  */
      back_refs[~tag] = NULL_TREE;
      if (type_tag)
	back_refs[~type_tag] = NULL_TREE;
      set_overrun ();
      return NULL_TREE;
    }

  if (!tree_node_vals (t))
    goto bail;

  if (type)
    {
      if (!tree_node_vals (t: type))
	goto bail;

      TYPE_SIZE (type) = tree_node ();
      TYPE_SIZE_UNIT (type) = tree_node ();
      TYPE_FIELDS (type) = chained_decls ();
      if (get_overrun ())
	goto bail;

      dump (dumper::TREE)
	&& dump ("Read type:%d %C:%N" , type_tag, TREE_CODE (type), type);
    }

  if (TREE_CODE (t) == VAR_DECL)
    {
      /* Uncontexted VAR_DECLs stream their initializer too.  */
      DECL_INITIAL (t) = tree_node ();
      if (TREE_STATIC (t))
	varpool_node::finalize_decl (decl: t);
    }

  if (TREE_CODE (t) == LAMBDA_EXPR
      && CLASSTYPE_LAMBDA_EXPR (TREE_TYPE (t)))
    {
      /* Prefer an already-known LAMBDA_EXPR for this closure type.  */
      existing = CLASSTYPE_LAMBDA_EXPR (TREE_TYPE (t));
      back_refs[~tag] = existing;
    }

  dump (dumper::TREE) && dump ("Read tree:%d %C:%N" , tag, TREE_CODE (t), t);

  if (TREE_CODE (existing) == INTEGER_CST && !TREE_OVERFLOW (existing))
    {
      /* Share INTEGER_CSTs with ones already cached, fixing up the
	 back reference to the canonical node.  */
      existing = cache_integer_cst (t, might_duplicate: true);
      back_refs[~tag] = existing;
    }

  return existing;
}
| 10073 | |
| 10074 | /* Whether DECL has a TU-local dependency in the hash. */ |
| 10075 | |
| 10076 | bool |
| 10077 | trees_out::has_tu_local_dep (tree decl) const |
| 10078 | { |
| 10079 | /* Only the contexts of fields or enums remember that they're |
| 10080 | TU-local. */ |
| 10081 | if (DECL_CONTEXT (decl) |
| 10082 | && (TREE_CODE (decl) == FIELD_DECL |
| 10083 | || TREE_CODE (decl) == CONST_DECL)) |
| 10084 | decl = TYPE_NAME (DECL_CONTEXT (decl)); |
| 10085 | |
| 10086 | depset *dep = dep_hash->find_dependency (entity: decl); |
| 10087 | if (!dep) |
| 10088 | { |
| 10089 | /* This might be the DECL_TEMPLATE_RESULT of a TEMPLATE_DECL |
| 10090 | which we found was TU-local and gave up early. */ |
| 10091 | int use_tpl = -1; |
| 10092 | if (tree ti = node_template_info (decl, use&: use_tpl)) |
| 10093 | dep = dep_hash->find_dependency (TI_TEMPLATE (ti)); |
| 10094 | } |
| 10095 | |
| 10096 | return dep && dep->is_tu_local (); |
| 10097 | } |
| 10098 | |
| 10099 | /* If T depends on a TU-local entity, return that decl. */ |
| 10100 | |
tree
trees_out::find_tu_local_decl (tree t)
{
  /* We need to have walked all deps first before we can check.  */
  gcc_checking_assert (!is_initial_scan ());

  /* cp_walk_tree callback: returns the TU-local decl reached at *TP
     (terminating the walk), or NULL_TREE to continue.  DATA is the
     trees_out instance.  */
  auto walker = [](tree *tp, int *walk_subtrees, void *data) -> tree
    {
      auto self = (trees_out *)data;

      tree decl = NULL_TREE;
      if (TYPE_P (*tp))
	{
	  /* A PMF type is a record type, which we otherwise wouldn't walk;
	     return whether the function type is TU-local.  */
	  if (TYPE_PTRMEMFUNC_P (*tp))
	    {
	      *walk_subtrees = 0;
	      return self->find_tu_local_decl (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
	    }
	  else
	    decl = TYPE_MAIN_DECL (*tp);
	}
      else if (DECL_P (*tp))
	decl = *tp;

      if (decl)
	{
	  /* We found a DECL, this will tell us whether we're TU-local.  */
	  *walk_subtrees = 0;
	  return self->has_tu_local_dep (decl) ? decl : NULL_TREE;
	}
      return NULL_TREE;
    };

  /* We need to walk without duplicates so that we step into the pointed-to
     types of array types.  */
  return cp_walk_tree_without_duplicates (&t, walker, this);
}
| 10140 | |
| 10141 | /* Get the name for TU-local decl T to be used in diagnostics. */ |
| 10142 | |
| 10143 | static tree |
| 10144 | name_for_tu_local_decl (tree t) |
| 10145 | { |
| 10146 | int flags = (TFF_SCOPE | TFF_DECL_SPECIFIERS); |
| 10147 | const char *str = decl_as_string (t, flags); |
| 10148 | return get_identifier (str); |
| 10149 | } |
| 10150 | |
/* Stream out tree node T.  We automatically create local back
   references, which is essentially a single pass lisp
   self-referential structure pretty-printer.

   Dispatches on the kind of node: TU-local placeholders, identifiers,
   binfos, enum values, types, template parms and uncontexted decls all
   get dedicated tags; everything else falls through to decl_node or
   tree_value.  The order of checks here must mirror the tag dispatch
   in trees_in::tree_node.  */

void
trees_out::tree_node (tree t)
{
  dump.indent ();
  walk_kind ref = ref_node (t);
  if (ref == WK_none)
    /* NULL_TREE or an already-streamed back reference; nothing more
       to write.  */
    goto done;

  /* Find TU-local entities and intercept streaming to instead write a
     placeholder value; this way we don't need to emit such decls.
     We only need to do this when writing a definition of an entity
     that we know names a TU-local entity.  */
  if (!is_initial_scan () && writing_local_entities)
    {
      tree local_decl = NULL_TREE;
      if (DECL_P (t) && has_tu_local_dep (decl: t))
	local_decl = t;
      /* Consider a type to be TU-local if it refers to any TU-local decl,
	 no matter how deep.

	 This worsens diagnostics slightly, as we often no longer point
	 directly to the at-fault entity when instantiating.  However, this
	 reduces the module size slightly and means that much less of pt.cc
	 needs to know about us.  */
      else if (TYPE_P (t))
	local_decl = find_tu_local_decl (t);
      else if (EXPR_P (t))
	local_decl = find_tu_local_decl (TREE_TYPE (t));

      if (local_decl)
	{
	  /* Insert T itself so later references back-ref the
	     placeholder rather than re-streaming it.  */
	  int tag = insert (t, walk: WK_value);
	  if (streaming_p ())
	    {
	      tu_local_count++;
	      i (v: tt_tu_local);
	      dump (dumper::TREE)
		&& dump ("Writing TU-local entity:%d %C:%N",
			 tag, TREE_CODE (t), t);
	    }
	  /* Write the name and location used by stream-in
	     diagnostics about this placeholder.  */
	  tree_node (t: name_for_tu_local_decl (t: local_decl));
	  if (state)
	    state->write_location (*this, DECL_SOURCE_LOCATION (local_decl));
	  goto done;
	}
    }

  if (ref != WK_normal)
    goto skip_normal;

  if (TREE_CODE (t) == IDENTIFIER_NODE)
    {
      /* An identifier node -> tt_id, tt_conv_id, tt_anon_id, tt_lambda_id,
	 tt_internal_id.  */
      int code = tt_id;
      if (IDENTIFIER_ANON_P (t))
	code = IDENTIFIER_LAMBDA_P (t) ? tt_lambda_id : tt_anon_id;
      else if (IDENTIFIER_INTERNAL_P (t))
	code = tt_internal_id;
      else if (IDENTIFIER_CONV_OP_P (t))
	code = tt_conv_id;

      if (streaming_p ())
	i (v: code);

      if (code == tt_conv_id)
	{
	  /* Conversion operators are identified by their target type,
	     not their spelling.  */
	  tree type = TREE_TYPE (t);
	  gcc_checking_assert (type || t == conv_op_identifier);
	  tree_node (t: type);
	}
      else if (code == tt_id && streaming_p ())
	str (IDENTIFIER_POINTER (t), IDENTIFIER_LENGTH (t));
      else if (code == tt_internal_id && streaming_p ())
	str (ptr: prefix_for_internal_label (label: t));

      int tag = insert (t);
      if (streaming_p ())
	{
	  /* We know the ordering of the 5 id tags.  */
	  static const char *const kinds[] =
	    {"", "conv_op ", "anon ", "lambda ", "internal "};
	  dump (dumper::TREE)
	    && dump ("Written:%d %sidentifier:%N", tag,
		     kinds[code - tt_id],
		     code == tt_conv_id ? TREE_TYPE (t) : t);
	}
      goto done;
    }

  if (TREE_CODE (t) == TREE_BINFO)
    {
      /* A BINFO -> tt_binfo.
	 We must do this by reference.  We stream the binfo tree
	 itself when streaming its owning RECORD_TYPE.  That we got
	 here means the dominating type is not in this SCC.  */
      if (streaming_p ())
	i (v: tt_binfo);
      binfo_mergeable (binfo: t);
      gcc_checking_assert (!TREE_VISITED (t));
      int tag = insert (t);
      if (streaming_p ())
	dump (dumper::TREE) && dump ("Inserting binfo:%d %N", tag, t);
      goto done;
    }

  if (TREE_CODE (t) == INTEGER_CST
      && !TREE_OVERFLOW (t)
      && TREE_CODE (TREE_TYPE (t)) == ENUMERAL_TYPE)
    {
      /* An integral constant of enumeral type.  See if it matches one
	 of the enumeration values.  */
      for (tree values = TYPE_VALUES (TREE_TYPE (t));
	   values; values = TREE_CHAIN (values))
	{
	  tree decl = TREE_VALUE (values);
	  if (tree_int_cst_equal (DECL_INITIAL (decl), t))
	    {
	      if (streaming_p ())
		u (v: tt_enum_value);
	      tree_node (t: decl);
	      dump (dumper::TREE) && dump ("Written enum value %N", decl);
	      goto done;
	    }
	}
      /* It didn't match.  We'll write it as an explicit INTEGER_CST
	 node.  */
    }

  if (TYPE_P (t))
    {
      type_node (type: t);
      goto done;
    }

  if (DECL_P (t))
    {
      if (DECL_TEMPLATE_PARM_P (t))
	{
	  tpl_parm_value (parm: t);
	  goto done;
	}

      if (!DECL_CONTEXT (t))
	{
	  /* There are a few cases of decls with no context.  We'll write
	     these by value, but first assert they are cases we expect.  */
	  gcc_checking_assert (ref == WK_normal);
	  switch (TREE_CODE (t))
	    {
	    default: gcc_unreachable ();

	    case LABEL_DECL:
	      /* CASE_LABEL_EXPRs contain uncontexted LABEL_DECLs.  */
	      gcc_checking_assert (!DECL_NAME (t));
	      break;

	    case VAR_DECL:
	      /* AGGR_INIT_EXPRs cons up anonymous uncontexted VAR_DECLs,
		 and internal vars are created by sanitizers and
		 __builtin_source_location.  */
	      gcc_checking_assert ((!DECL_NAME (t)
				    || IDENTIFIER_INTERNAL_P (DECL_NAME (t)))
				   && DECL_ARTIFICIAL (t));
	      break;

	    case PARM_DECL:
	      /* REQUIRES_EXPRs have a chain of uncontexted PARM_DECLS,
		 and an implicit this parm in an NSDMI has no context.  */
	      gcc_checking_assert (CONSTRAINT_VAR_P (t)
				   || DECL_NAME (t) == this_identifier);
	      break;

	    case TYPE_DECL:
	      /* Some parts of the compiler need internal struct types;
		 these types may not have an appropriate context to use.
		 Walk the whole type (including its definition) by value.  */
	      gcc_checking_assert (DECL_ARTIFICIAL (t)
				   && TYPE_ARTIFICIAL (TREE_TYPE (t))
				   && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t))
				   && !CLASS_TYPE_P (TREE_TYPE (t)));
	      break;
	    }
	  mark_declaration (decl: t, do_defn: has_definition (decl: t));
	  goto by_value;
	}
    }

 skip_normal:
  if (DECL_P (t) && !decl_node (decl: t, ref))
    /* decl_node streamed the decl by reference; nothing more to do.  */
    goto done;

  /* Otherwise by value */
 by_value:
  tree_value (t);

 done:
  /* And, breathe out.  */
  dump.outdent ();
}
| 10355 | |
| 10356 | /* Stream in a tree node. */ |
| 10357 | |
| 10358 | tree |
| 10359 | trees_in::tree_node (bool is_use) |
| 10360 | { |
| 10361 | if (get_overrun ()) |
| 10362 | return NULL_TREE; |
| 10363 | |
| 10364 | dump.indent (); |
| 10365 | int tag = i (); |
| 10366 | tree res = NULL_TREE; |
| 10367 | switch (tag) |
| 10368 | { |
| 10369 | default: |
| 10370 | /* backref, pull it out of the map. */ |
| 10371 | res = back_ref (tag); |
| 10372 | break; |
| 10373 | |
| 10374 | case tt_null: |
| 10375 | /* NULL_TREE. */ |
| 10376 | break; |
| 10377 | |
| 10378 | case tt_tu_local: |
| 10379 | { |
| 10380 | /* A translation-unit-local entity. */ |
| 10381 | res = make_node (TU_LOCAL_ENTITY); |
| 10382 | int tag = insert (t: res); |
| 10383 | |
| 10384 | TU_LOCAL_ENTITY_NAME (res) = tree_node (); |
| 10385 | TU_LOCAL_ENTITY_LOCATION (res) = state->read_location (*this); |
| 10386 | dump (dumper::TREE) && dump ("Read TU-local entity:%d %N" , tag, res); |
| 10387 | } |
| 10388 | break; |
| 10389 | |
| 10390 | case tt_fixed: |
| 10391 | /* A fixed ref, find it in the fixed_ref array. */ |
| 10392 | { |
| 10393 | unsigned fix = u (); |
| 10394 | if (fix < (*fixed_trees).length ()) |
| 10395 | { |
| 10396 | res = (*fixed_trees)[fix]; |
| 10397 | dump (dumper::TREE) && dump ("Read fixed:%u %C:%N%S" , fix, |
| 10398 | TREE_CODE (res), res, res); |
| 10399 | } |
| 10400 | |
| 10401 | if (!res) |
| 10402 | set_overrun (); |
| 10403 | } |
| 10404 | break; |
| 10405 | |
| 10406 | case tt_parm: |
| 10407 | { |
| 10408 | tree fn = tree_node (); |
| 10409 | if (fn && TREE_CODE (fn) == FUNCTION_DECL) |
| 10410 | res = tree_node (); |
| 10411 | if (res) |
| 10412 | dump (dumper::TREE) |
| 10413 | && dump ("Read %s reference %N" , |
| 10414 | TREE_CODE (res) == PARM_DECL ? "parameter" : "result" , |
| 10415 | res); |
| 10416 | } |
| 10417 | break; |
| 10418 | |
| 10419 | case tt_node: |
| 10420 | /* A new node. Stream it in. */ |
| 10421 | res = tree_value (); |
| 10422 | break; |
| 10423 | |
| 10424 | case tt_decl: |
| 10425 | /* A new decl. Stream it in. */ |
| 10426 | res = decl_value (); |
| 10427 | break; |
| 10428 | |
| 10429 | case tt_tpl_parm: |
| 10430 | /* A template parameter. Stream it in. */ |
| 10431 | res = tpl_parm_value (); |
| 10432 | break; |
| 10433 | |
| 10434 | case tt_id: |
| 10435 | /* An identifier node. */ |
| 10436 | { |
| 10437 | size_t l; |
| 10438 | const char *chars = str (len_p: &l); |
| 10439 | res = get_identifier_with_length (chars, l); |
| 10440 | int tag = insert (t: res); |
| 10441 | dump (dumper::TREE) |
| 10442 | && dump ("Read identifier:%d %N" , tag, res); |
| 10443 | } |
| 10444 | break; |
| 10445 | |
| 10446 | case tt_conv_id: |
| 10447 | /* A conversion operator. Get the type and recreate the |
| 10448 | identifier. */ |
| 10449 | { |
| 10450 | tree type = tree_node (); |
| 10451 | if (!get_overrun ()) |
| 10452 | { |
| 10453 | res = type ? make_conv_op_name (type) : conv_op_identifier; |
| 10454 | int tag = insert (t: res); |
| 10455 | dump (dumper::TREE) |
| 10456 | && dump ("Created conv_op:%d %S for %N" , tag, res, type); |
| 10457 | } |
| 10458 | } |
| 10459 | break; |
| 10460 | |
| 10461 | case tt_anon_id: |
| 10462 | case tt_lambda_id: |
| 10463 | /* An anonymous or lambda id. */ |
| 10464 | { |
| 10465 | res = make_anon_name (); |
| 10466 | if (tag == tt_lambda_id) |
| 10467 | IDENTIFIER_LAMBDA_P (res) = true; |
| 10468 | int tag = insert (t: res); |
| 10469 | dump (dumper::TREE) |
| 10470 | && dump ("Read %s identifier:%d %N" , |
| 10471 | IDENTIFIER_LAMBDA_P (res) ? "lambda" : "anon" , tag, res); |
| 10472 | } |
| 10473 | break; |
| 10474 | |
| 10475 | case tt_internal_id: |
| 10476 | /* An internal label. */ |
| 10477 | { |
| 10478 | const char *prefix = str (); |
| 10479 | res = generate_internal_label (prefix); |
| 10480 | int tag = insert (t: res); |
| 10481 | dump (dumper::TREE) |
| 10482 | && dump ("Read internal identifier:%d %N" , tag, res); |
| 10483 | } |
| 10484 | break; |
| 10485 | |
| 10486 | case tt_typedef_type: |
| 10487 | res = tree_node (); |
| 10488 | if (res) |
| 10489 | { |
| 10490 | dump (dumper::TREE) |
| 10491 | && dump ("Read %stypedef %C:%N" , |
| 10492 | DECL_IMPLICIT_TYPEDEF_P (res) ? "implicit " : "" , |
| 10493 | TREE_CODE (res), res); |
| 10494 | if (TREE_CODE (res) != TU_LOCAL_ENTITY) |
| 10495 | res = TREE_TYPE (res); |
| 10496 | } |
| 10497 | break; |
| 10498 | |
| 10499 | case tt_derived_type: |
| 10500 | /* A type derived from some other type. */ |
| 10501 | { |
| 10502 | enum tree_code code = tree_code (u ()); |
| 10503 | res = tree_node (); |
| 10504 | |
| 10505 | switch (code) |
| 10506 | { |
| 10507 | default: |
| 10508 | set_overrun (); |
| 10509 | break; |
| 10510 | |
| 10511 | case ARRAY_TYPE: |
| 10512 | { |
| 10513 | tree elt_type = res; |
| 10514 | tree domain = tree_node (); |
| 10515 | int dep = u (); |
| 10516 | if (!get_overrun ()) |
| 10517 | { |
| 10518 | res = build_cplus_array_type (elt_type, domain, is_dep: dep); |
| 10519 | /* If we're an array of an incomplete imported type, |
| 10520 | save it for post-processing so that we can attempt |
| 10521 | to complete the type later if it will get a |
| 10522 | definition later in the cluster. */ |
| 10523 | if (!dep |
| 10524 | && !COMPLETE_TYPE_P (elt_type) |
| 10525 | && CLASS_TYPE_P (elt_type) |
| 10526 | && DECL_LANG_SPECIFIC (TYPE_NAME (elt_type)) |
| 10527 | && DECL_MODULE_IMPORT_P (TYPE_NAME (elt_type))) |
| 10528 | post_process_type (type: res); |
| 10529 | } |
| 10530 | } |
| 10531 | break; |
| 10532 | |
| 10533 | case COMPLEX_TYPE: |
| 10534 | if (!get_overrun ()) |
| 10535 | res = build_complex_type (res); |
| 10536 | break; |
| 10537 | |
| 10538 | case BOOLEAN_TYPE: |
| 10539 | { |
| 10540 | unsigned precision = u (); |
| 10541 | if (!get_overrun ()) |
| 10542 | res = build_nonstandard_boolean_type (precision); |
| 10543 | } |
| 10544 | break; |
| 10545 | |
| 10546 | case INTEGER_TYPE: |
| 10547 | if (res) |
| 10548 | { |
| 10549 | /* A range type (representing an array domain). */ |
| 10550 | tree min = tree_node (); |
| 10551 | tree max = tree_node (); |
| 10552 | |
| 10553 | if (!get_overrun ()) |
| 10554 | res = build_range_type (res, min, max); |
| 10555 | } |
| 10556 | else |
| 10557 | { |
| 10558 | /* A new integral type (representing a bitfield). */ |
| 10559 | unsigned enc = u (); |
| 10560 | if (!get_overrun ()) |
| 10561 | res = build_nonstandard_integer_type (enc >> 1, enc & 1); |
| 10562 | } |
| 10563 | break; |
| 10564 | |
| 10565 | case FUNCTION_TYPE: |
| 10566 | case METHOD_TYPE: |
| 10567 | { |
| 10568 | tree klass = code == METHOD_TYPE ? tree_node () : NULL_TREE; |
| 10569 | tree args = tree_node (); |
| 10570 | if (!get_overrun ()) |
| 10571 | { |
| 10572 | if (klass) |
| 10573 | res = build_method_type_directly (klass, res, args); |
| 10574 | else |
| 10575 | res = cp_build_function_type (res, args); |
| 10576 | } |
| 10577 | } |
| 10578 | break; |
| 10579 | |
| 10580 | case OFFSET_TYPE: |
| 10581 | { |
| 10582 | tree base = tree_node (); |
| 10583 | if (!get_overrun ()) |
| 10584 | res = build_offset_type (base, res); |
| 10585 | } |
| 10586 | break; |
| 10587 | |
| 10588 | case POINTER_TYPE: |
| 10589 | if (!get_overrun ()) |
| 10590 | res = build_pointer_type (res); |
| 10591 | break; |
| 10592 | |
| 10593 | case REFERENCE_TYPE: |
| 10594 | { |
| 10595 | bool rval = bool (u ()); |
| 10596 | if (!get_overrun ()) |
| 10597 | res = cp_build_reference_type (res, rval); |
| 10598 | } |
| 10599 | break; |
| 10600 | |
| 10601 | case DECLTYPE_TYPE: |
| 10602 | case TYPEOF_TYPE: |
| 10603 | case DEPENDENT_OPERATOR_TYPE: |
| 10604 | { |
| 10605 | tree expr = tree_node (); |
| 10606 | if (!get_overrun ()) |
| 10607 | { |
| 10608 | res = cxx_make_type (code); |
| 10609 | TYPE_VALUES_RAW (res) = expr; |
| 10610 | if (code == DECLTYPE_TYPE) |
| 10611 | tree_node_bools (t: res); |
| 10612 | SET_TYPE_STRUCTURAL_EQUALITY (res); |
| 10613 | } |
| 10614 | } |
| 10615 | break; |
| 10616 | |
| 10617 | case TRAIT_TYPE: |
| 10618 | { |
| 10619 | tree kind = tree_node (); |
| 10620 | tree type1 = tree_node (); |
| 10621 | tree type2 = tree_node (); |
| 10622 | if (!get_overrun ()) |
| 10623 | { |
| 10624 | res = cxx_make_type (TRAIT_TYPE); |
| 10625 | TRAIT_TYPE_KIND_RAW (res) = kind; |
| 10626 | TRAIT_TYPE_TYPE1 (res) = type1; |
| 10627 | TRAIT_TYPE_TYPE2 (res) = type2; |
| 10628 | SET_TYPE_STRUCTURAL_EQUALITY (res); |
| 10629 | } |
| 10630 | } |
| 10631 | break; |
| 10632 | |
| 10633 | case TYPE_ARGUMENT_PACK: |
| 10634 | if (!get_overrun ()) |
| 10635 | { |
| 10636 | tree pack = cxx_make_type (TYPE_ARGUMENT_PACK); |
| 10637 | ARGUMENT_PACK_ARGS (pack) = res; |
| 10638 | res = pack; |
| 10639 | } |
| 10640 | break; |
| 10641 | |
| 10642 | case TYPE_PACK_EXPANSION: |
| 10643 | { |
| 10644 | bool local = u (); |
| 10645 | tree param_packs = tree_node (); |
| 10646 | tree = tree_node (); |
| 10647 | if (!get_overrun ()) |
| 10648 | { |
| 10649 | tree expn = cxx_make_type (TYPE_PACK_EXPANSION); |
| 10650 | SET_TYPE_STRUCTURAL_EQUALITY (expn); |
| 10651 | PACK_EXPANSION_PATTERN (expn) = res; |
| 10652 | PACK_EXPANSION_PARAMETER_PACKS (expn) = param_packs; |
| 10653 | PACK_EXPANSION_EXTRA_ARGS (expn) = extra_args; |
| 10654 | PACK_EXPANSION_LOCAL_P (expn) = local; |
| 10655 | res = expn; |
| 10656 | } |
| 10657 | } |
| 10658 | break; |
| 10659 | |
| 10660 | case PACK_INDEX_TYPE: |
| 10661 | { |
| 10662 | tree pack = tree_node (); |
| 10663 | tree index = tree_node (); |
| 10664 | if (!get_overrun ()) |
| 10665 | res = make_pack_index (pack, index); |
| 10666 | } |
| 10667 | break; |
| 10668 | |
| 10669 | case TYPENAME_TYPE: |
| 10670 | { |
| 10671 | tree ctx = tree_node (); |
| 10672 | tree name = tree_node (); |
| 10673 | tree fullname = tree_node (); |
| 10674 | enum tag_types tag_type = tag_types (u ()); |
| 10675 | |
| 10676 | if (!get_overrun ()) |
| 10677 | res = build_typename_type (ctx, name, fullname, tag_type); |
| 10678 | } |
| 10679 | break; |
| 10680 | |
| 10681 | case UNBOUND_CLASS_TEMPLATE: |
| 10682 | { |
| 10683 | tree ctx = tree_node (); |
| 10684 | tree name = tree_node (); |
| 10685 | tree parms = tree_node (); |
| 10686 | |
| 10687 | if (!get_overrun ()) |
| 10688 | res = make_unbound_class_template_raw (ctx, name, parms); |
| 10689 | } |
| 10690 | break; |
| 10691 | |
| 10692 | case VECTOR_TYPE: |
| 10693 | { |
| 10694 | poly_uint64 nunits; |
| 10695 | for (unsigned ix = 0; ix != NUM_POLY_INT_COEFFS; ix++) |
| 10696 | nunits.coeffs[ix] = wu (); |
| 10697 | if (!get_overrun ()) |
| 10698 | res = build_vector_type (res, nunits); |
| 10699 | } |
| 10700 | break; |
| 10701 | |
| 10702 | case META_TYPE: |
| 10703 | if (!get_overrun ()) |
| 10704 | res = meta_info_type_node; |
| 10705 | break; |
| 10706 | |
| 10707 | case SPLICE_SCOPE: |
| 10708 | { |
| 10709 | bool type = u (); |
| 10710 | tree expr = tree_node (); |
| 10711 | |
| 10712 | if (!get_overrun ()) |
| 10713 | res = make_splice_scope (expr, type); |
| 10714 | } |
| 10715 | break; |
| 10716 | } |
| 10717 | |
| 10718 | /* In the exporting TU, a derived type with attributes was built by |
| 10719 | build_type_attribute_variant as a distinct copy, with itself as |
| 10720 | TYPE_MAIN_VARIANT. We repeat that on import to get the version |
| 10721 | without attributes as TYPE_CANONICAL. */ |
| 10722 | if (tree attribs = tree_node ()) |
| 10723 | res = cp_build_type_attribute_variant (res, attribs); |
| 10724 | |
| 10725 | int tag = i (); |
| 10726 | if (!tag) |
| 10727 | { |
| 10728 | tag = insert (t: res); |
| 10729 | if (res) |
| 10730 | dump (dumper::TREE) |
| 10731 | && dump ("Created:%d derived type %C" , tag, code); |
| 10732 | } |
| 10733 | else |
| 10734 | res = back_ref (tag); |
| 10735 | } |
| 10736 | break; |
| 10737 | |
| 10738 | case tt_variant_type: |
| 10739 | /* Variant of some type. */ |
| 10740 | { |
| 10741 | res = tree_node (); |
| 10742 | int flags = i (); |
| 10743 | if (get_overrun ()) |
| 10744 | ; |
| 10745 | else if (flags < 0) |
| 10746 | /* No change. */; |
| 10747 | else if (TREE_CODE (res) == FUNCTION_TYPE |
| 10748 | || TREE_CODE (res) == METHOD_TYPE) |
| 10749 | { |
| 10750 | cp_ref_qualifier rqual = cp_ref_qualifier (flags & 3); |
| 10751 | bool late = (flags >> 2) & 1; |
| 10752 | cp_cv_quals quals = cp_cv_quals (flags >> 3); |
| 10753 | |
| 10754 | tree raises = tree_node (); |
| 10755 | if (raises == error_mark_node) |
| 10756 | raises = TYPE_RAISES_EXCEPTIONS (res); |
| 10757 | |
| 10758 | res = build_cp_fntype_variant (res, rqual, raises, late); |
| 10759 | if (TREE_CODE (res) == FUNCTION_TYPE) |
| 10760 | res = apply_memfn_quals (res, quals, rqual); |
| 10761 | } |
| 10762 | else |
| 10763 | { |
| 10764 | res = build_aligned_type (res, (1u << flags) >> 1); |
| 10765 | TYPE_USER_ALIGN (res) = true; |
| 10766 | } |
| 10767 | |
| 10768 | int quals = i (); |
| 10769 | if (quals >= 0 && !get_overrun ()) |
| 10770 | res = cp_build_qualified_type (res, quals); |
| 10771 | |
| 10772 | int tag = i (); |
| 10773 | if (!tag) |
| 10774 | { |
| 10775 | tag = insert (t: res); |
| 10776 | if (res) |
| 10777 | dump (dumper::TREE) |
| 10778 | && dump ("Created:%d variant type %C" , tag, TREE_CODE (res)); |
| 10779 | } |
| 10780 | else |
| 10781 | res = back_ref (tag); |
| 10782 | } |
| 10783 | break; |
| 10784 | |
| 10785 | case tt_tinfo_var: |
| 10786 | case tt_tinfo_typedef: |
| 10787 | /* A tinfo var or typedef. */ |
| 10788 | { |
| 10789 | bool is_var = tag == tt_tinfo_var; |
| 10790 | unsigned ix = u (); |
| 10791 | tree type = NULL_TREE; |
| 10792 | |
| 10793 | if (is_var) |
| 10794 | { |
| 10795 | tree name = tree_node (); |
| 10796 | type = tree_node (); |
| 10797 | |
| 10798 | if (!get_overrun ()) |
| 10799 | res = get_tinfo_decl_direct (type, name, int (ix)); |
| 10800 | } |
| 10801 | else |
| 10802 | { |
| 10803 | if (!get_overrun ()) |
| 10804 | { |
| 10805 | type = get_pseudo_tinfo_type (ix); |
| 10806 | res = TYPE_NAME (type); |
| 10807 | } |
| 10808 | } |
| 10809 | if (res) |
| 10810 | { |
| 10811 | int tag = insert (t: res); |
| 10812 | dump (dumper::TREE) |
| 10813 | && dump ("Created tinfo_%s:%d %S:%u for %N" , |
| 10814 | is_var ? "var" : "decl" , tag, res, ix, type); |
| 10815 | if (!is_var) |
| 10816 | { |
| 10817 | tag = insert (t: type); |
| 10818 | dump (dumper::TREE) |
| 10819 | && dump ("Created tinfo_type:%d %u %N" , tag, ix, type); |
| 10820 | } |
| 10821 | } |
| 10822 | } |
| 10823 | break; |
| 10824 | |
| 10825 | case tt_ptrmem_type: |
| 10826 | /* A pointer to member function. */ |
| 10827 | { |
| 10828 | tree type = tree_node (); |
| 10829 | if (type && TREE_CODE (type) == POINTER_TYPE |
| 10830 | && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE) |
| 10831 | { |
| 10832 | res = build_ptrmemfunc_type (type); |
| 10833 | int tag = insert (t: res); |
| 10834 | dump (dumper::TREE) && dump ("Created:%d ptrmem type" , tag); |
| 10835 | } |
| 10836 | else |
| 10837 | set_overrun (); |
| 10838 | } |
| 10839 | break; |
| 10840 | |
| 10841 | case tt_nttp_var: |
| 10842 | /* An NTTP object. */ |
| 10843 | { |
| 10844 | tree init = tree_node (); |
| 10845 | tree name = tree_node (); |
| 10846 | if (!get_overrun ()) |
| 10847 | { |
| 10848 | /* We don't want to check the initializer as that may require |
| 10849 | name lookup, which could recursively start lazy loading. |
| 10850 | Instead we know that INIT is already valid so we can just |
| 10851 | apply that directly. */ |
| 10852 | res = get_template_parm_object (expr: init, mangle: name, /*check_init=*/false); |
| 10853 | int tag = insert (t: res); |
| 10854 | dump (dumper::TREE) |
| 10855 | && dump ("Created nttp object:%d %N" , tag, name); |
| 10856 | } |
| 10857 | } |
| 10858 | break; |
| 10859 | |
| 10860 | case tt_enum_value: |
| 10861 | /* An enum const value. */ |
| 10862 | { |
| 10863 | if (tree decl = tree_node ()) |
| 10864 | { |
| 10865 | dump (dumper::TREE) && dump ("Read enum value %N" , decl); |
| 10866 | res = DECL_INITIAL (decl); |
| 10867 | } |
| 10868 | |
| 10869 | if (!res) |
| 10870 | set_overrun (); |
| 10871 | } |
| 10872 | break; |
| 10873 | |
| 10874 | case tt_enum_decl: |
| 10875 | /* An enum decl. */ |
| 10876 | { |
| 10877 | tree ctx = tree_node (); |
| 10878 | tree name = tree_node (); |
| 10879 | |
| 10880 | if (!get_overrun () |
| 10881 | && TREE_CODE (ctx) == ENUMERAL_TYPE) |
| 10882 | res = find_enum_member (ctx, name); |
| 10883 | |
| 10884 | if (!res) |
| 10885 | set_overrun (); |
| 10886 | else |
| 10887 | { |
| 10888 | int tag = insert (t: res); |
| 10889 | dump (dumper::TREE) |
| 10890 | && dump ("Read enum decl:%d %C:%N" , tag, TREE_CODE (res), res); |
| 10891 | } |
| 10892 | } |
| 10893 | break; |
| 10894 | |
| 10895 | case tt_data_member: |
| 10896 | /* A data member. */ |
| 10897 | { |
| 10898 | tree ctx = tree_node (); |
| 10899 | tree name = tree_node (); |
| 10900 | |
| 10901 | if (!get_overrun () |
| 10902 | && RECORD_OR_UNION_TYPE_P (ctx)) |
| 10903 | { |
| 10904 | if (name) |
| 10905 | res = lookup_class_binding (ctx, name); |
| 10906 | else |
| 10907 | res = lookup_field_ident (ctx, ix: u ()); |
| 10908 | |
| 10909 | if (!res |
| 10910 | || (TREE_CODE (res) != FIELD_DECL |
| 10911 | && TREE_CODE (res) != USING_DECL) |
| 10912 | || DECL_CONTEXT (res) != ctx) |
| 10913 | res = NULL_TREE; |
| 10914 | } |
| 10915 | |
| 10916 | if (!res) |
| 10917 | set_overrun (); |
| 10918 | else |
| 10919 | { |
| 10920 | int tag = insert (t: res); |
| 10921 | dump (dumper::TREE) |
| 10922 | && dump ("Read member:%d %C:%N" , tag, TREE_CODE (res), res); |
| 10923 | } |
| 10924 | } |
| 10925 | break; |
| 10926 | |
| 10927 | case tt_binfo: |
| 10928 | /* A BINFO. Walk the tree of the dominating type. */ |
| 10929 | { |
| 10930 | tree type; |
| 10931 | unsigned ix = binfo_mergeable (&type); |
| 10932 | if (type) |
| 10933 | { |
| 10934 | res = TYPE_BINFO (type); |
| 10935 | for (; ix && res; res = TREE_CHAIN (res)) |
| 10936 | ix--; |
| 10937 | if (!res) |
| 10938 | set_overrun (); |
| 10939 | } |
| 10940 | |
| 10941 | if (get_overrun ()) |
| 10942 | break; |
| 10943 | |
| 10944 | /* Insert binfo into backreferences. */ |
| 10945 | tag = insert (t: res); |
| 10946 | dump (dumper::TREE) && dump ("Read binfo:%d %N" , tag, res); |
| 10947 | } |
| 10948 | break; |
| 10949 | |
| 10950 | case tt_vtable: |
| 10951 | { |
| 10952 | unsigned ix = u (); |
| 10953 | tree ctx = tree_node (); |
| 10954 | dump (dumper::TREE) && dump ("Reading vtable %N[%u]" , ctx, ix); |
| 10955 | if (TREE_CODE (ctx) == RECORD_TYPE && TYPE_LANG_SPECIFIC (ctx)) |
| 10956 | for (res = CLASSTYPE_VTABLES (ctx); res; res = DECL_CHAIN (res)) |
| 10957 | if (!ix--) |
| 10958 | break; |
| 10959 | if (!res) |
| 10960 | set_overrun (); |
| 10961 | } |
| 10962 | break; |
| 10963 | |
| 10964 | case tt_thunk: |
| 10965 | { |
| 10966 | int fixed = i (); |
| 10967 | tree target = tree_node (); |
| 10968 | tree virt = tree_node (); |
| 10969 | |
| 10970 | for (tree thunk = DECL_THUNKS (target); |
| 10971 | thunk; thunk = DECL_CHAIN (thunk)) |
| 10972 | if (THUNK_FIXED_OFFSET (thunk) == fixed |
| 10973 | && !THUNK_VIRTUAL_OFFSET (thunk) == !virt |
| 10974 | && (!virt |
| 10975 | || tree_int_cst_equal (virt, THUNK_VIRTUAL_OFFSET (thunk)))) |
| 10976 | { |
| 10977 | res = thunk; |
| 10978 | break; |
| 10979 | } |
| 10980 | |
| 10981 | int tag = insert (t: res); |
| 10982 | if (res) |
| 10983 | dump (dumper::TREE) |
| 10984 | && dump ("Read:%d thunk %N to %N" , tag, DECL_NAME (res), target); |
| 10985 | else |
| 10986 | set_overrun (); |
| 10987 | } |
| 10988 | break; |
| 10989 | |
| 10990 | case tt_clone_ref: |
| 10991 | { |
| 10992 | tree target = tree_node (); |
| 10993 | tree name = tree_node (); |
| 10994 | |
| 10995 | if (DECL_P (target) && DECL_MAYBE_IN_CHARGE_CDTOR_P (target)) |
| 10996 | { |
| 10997 | tree clone; |
| 10998 | FOR_EVERY_CLONE (clone, target) |
| 10999 | if (DECL_NAME (clone) == name) |
| 11000 | { |
| 11001 | res = clone; |
| 11002 | break; |
| 11003 | } |
| 11004 | } |
| 11005 | |
| 11006 | /* A clone might have a different vtable entry. */ |
| 11007 | if (res && DECL_VIRTUAL_P (res)) |
| 11008 | DECL_VINDEX (res) = tree_node (); |
| 11009 | |
| 11010 | if (!res) |
| 11011 | set_overrun (); |
| 11012 | int tag = insert (t: res); |
| 11013 | if (res) |
| 11014 | dump (dumper::TREE) |
| 11015 | && dump ("Read:%d clone %N of %N" , tag, DECL_NAME (res), target); |
| 11016 | else |
| 11017 | set_overrun (); |
| 11018 | } |
| 11019 | break; |
| 11020 | |
| 11021 | case tt_entity: |
| 11022 | /* Index into the entity table. Perhaps not loaded yet! */ |
| 11023 | { |
| 11024 | unsigned origin = state->slurp->remap_module (owner: u ()); |
| 11025 | unsigned ident = u (); |
| 11026 | module_state *from = (*modules)[origin]; |
| 11027 | |
| 11028 | if (!origin || ident >= from->entity_num) |
| 11029 | set_overrun (); |
| 11030 | if (!get_overrun ()) |
| 11031 | { |
| 11032 | binding_slot *slot = &(*entity_ary)[from->entity_lwm + ident]; |
| 11033 | if (slot->is_lazy ()) |
| 11034 | if (!from->lazy_load (index: ident, mslot: slot)) |
| 11035 | set_overrun (); |
| 11036 | res = *slot; |
| 11037 | } |
| 11038 | |
| 11039 | if (res) |
| 11040 | { |
| 11041 | const char *kind = (origin != state->mod ? "Imported" : "Named" ); |
| 11042 | int tag = insert (t: res); |
| 11043 | dump (dumper::TREE) |
| 11044 | && dump ("%s:%d %C:%N@%M" , kind, tag, TREE_CODE (res), |
| 11045 | res, (*modules)[origin]); |
| 11046 | |
| 11047 | if (!add_indirects (decl: res)) |
| 11048 | { |
| 11049 | set_overrun (); |
| 11050 | res = NULL_TREE; |
| 11051 | } |
| 11052 | } |
| 11053 | } |
| 11054 | break; |
| 11055 | |
| 11056 | case tt_template: |
| 11057 | /* A template. */ |
| 11058 | if (tree tpl = tree_node ()) |
| 11059 | { |
| 11060 | res = (TREE_CODE (tpl) == TU_LOCAL_ENTITY ? |
| 11061 | tpl : DECL_TEMPLATE_RESULT (tpl)); |
| 11062 | dump (dumper::TREE) |
| 11063 | && dump ("Read template %C:%N" , TREE_CODE (res), res); |
| 11064 | } |
| 11065 | break; |
| 11066 | } |
| 11067 | |
| 11068 | if (is_use && !unused && res && DECL_P (res) && !TREE_USED (res)) |
| 11069 | { |
| 11070 | /* Mark decl used as mark_used does -- we cannot call |
| 11071 | mark_used in the middle of streaming, we only need a subset |
| 11072 | of its functionality. */ |
| 11073 | TREE_USED (res) = true; |
| 11074 | |
| 11075 | /* And for structured bindings also the underlying decl. */ |
| 11076 | if (DECL_DECOMPOSITION_P (res) && !DECL_DECOMP_IS_BASE (res)) |
| 11077 | TREE_USED (DECL_DECOMP_BASE (res)) = true; |
| 11078 | |
| 11079 | if (DECL_CLONED_FUNCTION_P (res)) |
| 11080 | TREE_USED (DECL_CLONED_FUNCTION (res)) = true; |
| 11081 | } |
| 11082 | |
| 11083 | dump.outdent (); |
| 11084 | return res; |
| 11085 | } |
| 11086 | |
/* Stream out template parameter list PARMS, a TREE_LIST of levels
   with the innermost level first (so we recurse to write outer
   levels before inner ones).  Each level is written as its vector
   length + 1 (zero terminates the list on stream-in, negative values
   are back-references), then the level number (TREE_PURPOSE),
   followed by each parameter decl and its constraints.  TPL_LEVELS
   is incremented for each level written; tpl_parms_fini later
   streams the per-parameter defaults.  */

void
trees_out::tpl_parms (tree parms, unsigned &tpl_levels)
{
  if (!parms)
    return;

  if (TREE_VISITED (parms))
    {
      /* Already streamed; emit a back-reference instead.  */
      ref_node (t: parms);
      return;
    }

  /* Write outer levels first.  */
  tpl_parms (TREE_CHAIN (parms), tpl_levels);

  tree vec = TREE_VALUE (parms);
  unsigned len = TREE_VEC_LENGTH (vec);
  /* Depth.  */
  int tag = insert (t: parms);
  if (streaming_p ())
    {
      /* len + 1 so that zero remains the end-of-list marker.  */
      i (v: len + 1);
      dump (dumper::TREE)
	&& dump ("Writing template parms:%d level:%N length:%d",
		 tag, TREE_PURPOSE (parms), len);
    }
  tree_node (TREE_PURPOSE (parms));

  for (unsigned ix = 0; ix != len; ix++)
    {
      tree parm = TREE_VEC_ELT (vec, ix);
      tree decl = TREE_VALUE (parm);

      /* Sanity-check the shape of each kind of template parameter
	 before streaming it.  */
      gcc_checking_assert (DECL_TEMPLATE_PARM_P (decl));
      if (CHECKING_P)
	switch (TREE_CODE (decl))
	  {
	  default: gcc_unreachable ();

	  case TEMPLATE_DECL:
	    gcc_assert ((TREE_CODE (TREE_TYPE (decl)) == TEMPLATE_TEMPLATE_PARM)
			&& (TREE_CODE (DECL_TEMPLATE_RESULT (decl)) == TYPE_DECL)
			&& (TYPE_NAME (TREE_TYPE (decl)) == decl));
	    break;

	  case TYPE_DECL:
	    gcc_assert ((TREE_CODE (TREE_TYPE (decl)) == TEMPLATE_TYPE_PARM)
			&& (TYPE_NAME (TREE_TYPE (decl)) == decl));
	    break;

	  case PARM_DECL:
	    gcc_assert ((TREE_CODE (DECL_INITIAL (decl)) == TEMPLATE_PARM_INDEX)
			&& (TREE_CODE (TEMPLATE_PARM_DECL (DECL_INITIAL (decl)))
			    == CONST_DECL)
			&& (DECL_TEMPLATE_PARM_P
			    (TEMPLATE_PARM_DECL (DECL_INITIAL (decl)))));
	    break;
	  }

      tree_node (t: decl);
      tree_node (TEMPLATE_PARM_CONSTRAINTS (parm));
    }

  tpl_levels++;
}
| 11151 | |
/* Stream in a template parameter list, mirroring
   trees_out::tpl_parms.  Reads levels until the zero terminator; a
   negative length is a back-reference to a previously read level.
   TPL_LEVELS is incremented per level read.  Returns the TREE_LIST
   of levels (innermost first), or NULL_TREE on error.  */

tree
trees_in::tpl_parms (unsigned &tpl_levels)
{
  tree parms = NULL_TREE;

  while (int len = i ())
    {
      if (len < 0)
	{
	  parms = back_ref (tag: len);
	  continue;
	}

      /* The writer streamed length + 1.  */
      len -= 1;
      parms = tree_cons (NULL_TREE, NULL_TREE, parms);
      int tag = insert (t: parms);
      TREE_PURPOSE (parms) = tree_node ();

      dump (dumper::TREE)
	&& dump ("Reading template parms:%d level:%N length:%d",
		 tag, TREE_PURPOSE (parms), len);

      tree vec = make_tree_vec (len);
      for (int ix = 0; ix != len; ix++)
	{
	  tree decl = tree_node ();
	  if (!decl)
	    return NULL_TREE;

	  tree parm = build_tree_list (NULL, decl);
	  TEMPLATE_PARM_CONSTRAINTS (parm) = tree_node ();

	  TREE_VEC_ELT (vec, ix) = parm;
	}

      TREE_VALUE (parms) = vec;
      tpl_levels++;
    }

  return parms;
}
| 11193 | |
/* Stream the remaining data of TMPL's template parameter levels:
   for each of TPL_LEVELS levels, the TREE_TYPE of the level's vec,
   then each parm's default argument (in reverse index order), plus
   the DECL_CONTEXT of template template parms.  Must mirror
   trees_in::tpl_parms_fini exactly.  */

void
trees_out::tpl_parms_fini (tree tmpl, unsigned tpl_levels)
{
  for (tree parms = DECL_TEMPLATE_PARMS (tmpl);
       tpl_levels--; parms = TREE_CHAIN (parms))
    {
      tree vec = TREE_VALUE (parms);

      tree_node (TREE_TYPE (vec));
      for (unsigned ix = TREE_VEC_LENGTH (vec); ix--;)
	{
	  tree parm = TREE_VEC_ELT (vec, ix);
	  /* TREE_PURPOSE of an element is its default value.  */
	  tree dflt = TREE_PURPOSE (parm);
	  tree_node (t: dflt);

	  /* Template template parameters need a context of their owning
	     template.  This is quite tricky to infer correctly on stream-in
	     (see PR c++/98881) so we'll just provide it directly.  */
	  tree decl = TREE_VALUE (parm);
	  if (TREE_CODE (decl) == TEMPLATE_DECL)
	    tree_node (DECL_CONTEXT (decl));
	}
    }
}
| 11218 | |
/* Read the remaining data of TMPL's template parameter levels,
   mirroring trees_out::tpl_parms_fini: per-level vec type, each
   parm's default value (reverse index order), and the owning
   context for template template parms.  Returns false on stream
   overrun.  */

bool
trees_in::tpl_parms_fini (tree tmpl, unsigned tpl_levels)
{
  for (tree parms = DECL_TEMPLATE_PARMS (tmpl);
       tpl_levels--; parms = TREE_CHAIN (parms))
    {
      tree vec = TREE_VALUE (parms);

      TREE_TYPE (vec) = tree_node ();
      for (unsigned ix = TREE_VEC_LENGTH (vec); ix--;)
	{
	  tree parm = TREE_VEC_ELT (vec, ix);
	  /* Default value for this parm.  */
	  tree dflt = tree_node ();
	  TREE_PURPOSE (parm) = dflt;

	  /* Template template parms get their context directly from
	     the stream (see the writer for why; PR c++/98881).  */
	  tree decl = TREE_VALUE (parm);
	  if (TREE_CODE (decl) == TEMPLATE_DECL)
	    DECL_CONTEXT (decl) = tree_node ();

	  /* Bail out early on a corrupt stream.  */
	  if (get_overrun ())
	    return false;
	}
    }
  return true;
}
| 11244 | |
| 11245 | /* PARMS is a LIST, one node per level. |
| 11246 | TREE_VALUE is a TREE_VEC of parm info for that level. |
| 11247 | each ELT is a TREE_LIST |
| 11248 | TREE_VALUE is PARM_DECL, TYPE_DECL or TEMPLATE_DECL |
| 11249 | TREE_PURPOSE is the default value. */ |
| 11250 | |
| 11251 | void |
| 11252 | trees_out:: (tree tpl, unsigned *tpl_levels) |
| 11253 | { |
| 11254 | tree parms = DECL_TEMPLATE_PARMS (tpl); |
| 11255 | tpl_parms (parms, tpl_levels&: *tpl_levels); |
| 11256 | |
| 11257 | /* Mark end. */ |
| 11258 | if (streaming_p ()) |
| 11259 | u (v: 0); |
| 11260 | |
| 11261 | if (*tpl_levels) |
| 11262 | tree_node (TEMPLATE_PARMS_CONSTRAINTS (parms)); |
| 11263 | } |
| 11264 | |
| 11265 | bool |
| 11266 | trees_in:: (tree tpl, unsigned *tpl_levels) |
| 11267 | { |
| 11268 | tree parms = tpl_parms (tpl_levels&: *tpl_levels); |
| 11269 | if (!parms) |
| 11270 | return false; |
| 11271 | |
| 11272 | DECL_TEMPLATE_PARMS (tpl) = parms; |
| 11273 | |
| 11274 | if (*tpl_levels) |
| 11275 | TEMPLATE_PARMS_CONSTRAINTS (parms) = tree_node (); |
| 11276 | |
| 11277 | return true; |
| 11278 | } |
| 11279 | |
| 11280 | /* Stream skeleton parm nodes, with their flags, type & parm indices. |
| 11281 | All the parms will have consecutive tags. */ |
| 11282 | |
/* Stream FN's PARM_DECLs in two phases: first the skeleton nodes
   with their bool flags (so they all get consecutive back-ref
   tags), then their full contents.  Also walks contract
   specifiers when building the dependency graph, and streams
   references to the contract pre/post functions.  */

void
trees_out::fn_parms_init (tree fn)
{
  /* First init them.  */
  int base_tag = ref_num - 1;
  int ix = 0;
  for (tree parm = DECL_ARGUMENTS (fn);
       parm; parm = DECL_CHAIN (parm), ix++)
    {
      if (streaming_p ())
	{
	  start (t: parm);
	  tree_node_bools (t: parm);
	}
      int tag = insert (t: parm);
      /* Tags decrement, so parm IX has tag base_tag - IX.  */
      gcc_checking_assert (base_tag - ix == tag);
    }
  /* Mark the end.  */
  if (streaming_p ())
    u (v: 0);

  /* Now stream their contents.  */
  ix = 0;
  for (tree parm = DECL_ARGUMENTS (fn);
       parm; parm = DECL_CHAIN (parm), ix++)
    {
      if (streaming_p ())
	dump (dumper::TREE)
	  && dump ("Writing parm:%d %u (%N) of %N" ,
		   base_tag - ix, ix, parm, fn);
      tree_node_vals (t: parm);
    }

  if (!streaming_p ())
    {
      /* We must walk contract specifiers so the dependency graph is
	 complete. */
      tree contract = get_fn_contract_specifiers (fn);
      for (; contract; contract = TREE_CHAIN (contract))
	tree_node (t: contract);
    }

  /* Write a reference to contracts pre/post functions, if any, to avoid
     regenerating them in importers. */
  tree_node (DECL_PRE_FN (fn));
  tree_node (DECL_POST_FN (fn));
}
| 11330 | |
| 11331 | /* Build skeleton parm nodes, read their flags, type & parm indices. */ |
| 11332 | |
/* Read FN's PARM_DECL skeletons and contents, mirroring
   trees_out::fn_parms_init.  Returns the (negative) base tag of the
   first parm, or 0 on failure.  */

int
trees_in::fn_parms_init (tree fn)
{
  /* Back-ref tags are the ones-complement of the back_refs index.  */
  int base_tag = ~(int)back_refs.length ();

  tree *parm_ptr = &DECL_ARGUMENTS (fn);
  int ix = 0;
  for (; int code = u (); ix++)
    {
      tree parm = start (code);
      if (!tree_node_bools (t: parm))
	return 0;

      int tag = insert (t: parm);
      gcc_checking_assert (base_tag - ix == tag);
      /* Chain the parm onto DECL_ARGUMENTS as we go.  */
      *parm_ptr = parm;
      parm_ptr = &DECL_CHAIN (parm);
    }

  /* Second phase: fill in the parm contents.  */
  ix = 0;
  for (tree parm = DECL_ARGUMENTS (fn);
       parm; parm = DECL_CHAIN (parm), ix++)
    {
      dump (dumper::TREE)
	&& dump ("Reading parm:%d %u (%N) of %N" ,
		 base_tag - ix, ix, parm, fn);
      if (!tree_node_vals (t: parm))
	return 0;
    }

  /* Reload references to contract functions, if any. */
  tree pre_fn = tree_node ();
  tree post_fn = tree_node ();
  set_contract_functions (fn, pre_fn, post_fn);

  return base_tag;
}
| 11370 | |
| 11371 | /* Read the remaining parm node data. Replace with existing (if |
| 11372 | non-null) in the map. */ |
| 11373 | |
/* Finish reading FN's parms whose back-ref tags start at TAG.  If
   EXISTING is a previously-known duplicate of FN, redirect the
   back-references to EXISTING's parms so later reads resolve to
   them; when we're supplying the definition (IS_DEFN and EXISTING
   has no body yet), also copy over definition-only parm data.  */

void
trees_in::fn_parms_fini (int tag, tree fn, tree existing, bool is_defn)
{
  tree existing_parm = existing ? DECL_ARGUMENTS (existing) : NULL_TREE;
  tree parms = DECL_ARGUMENTS (fn);
  for (tree parm = parms; parm; parm = DECL_CHAIN (parm))
    {
      if (existing_parm)
	{
	  if (is_defn && !DECL_SAVED_TREE (existing))
	    {
	      /* If we're about to become the definition, set the
		 names of the parms from us.  */
	      DECL_NAME (existing_parm) = DECL_NAME (parm);
	      DECL_SOURCE_LOCATION (existing_parm) = DECL_SOURCE_LOCATION (parm);

	      /* And some other flags important for codegen are only set
		 by the definition.  */
	      TREE_ADDRESSABLE (existing_parm) = TREE_ADDRESSABLE (parm);
	      DECL_BY_REFERENCE (existing_parm) = DECL_BY_REFERENCE (parm);
	      DECL_NONLOCAL (existing_parm) = DECL_NONLOCAL (parm);
	      DECL_ARG_TYPE (existing_parm) = DECL_ARG_TYPE (parm);

	      /* Invisiref parms had their types adjusted by cp_genericize. */
	      if (DECL_BY_REFERENCE (parm))
		{
		  TREE_TYPE (existing_parm) = TREE_TYPE (parm);
		  relayout_decl (existing_parm);
		}
	    }

	  /* Point the back-reference at the existing parm, not the
	     freshly-read one.  */
	  back_refs[~tag] = existing_parm;
	  existing_parm = DECL_CHAIN (existing_parm);
	}
      tag--;
    }
}
| 11411 | |
| 11412 | /* Encode into KEY the position of the local type (class or enum) |
| 11413 | declaration DECL within FN. The position is encoded as the |
| 11414 | index of the innermost BLOCK (numbered in BFS order) along with |
| 11415 | the index within its BLOCK_VARS list. */ |
| 11416 | |
/* Encode into KEY the position of the local type (class or enum)
   declaration DECL within FN: the BFS index of its containing BLOCK
   in the top 22 bits and its TYPE_DECL index within BLOCK_VARS in
   the low 10 bits.  NOTE(review): a decl index of 1023 (or more)
   aliases the not-found sentinel below — presumably unreachable in
   practice; confirm against the reader's decl_pos check.  */

void
trees_out::key_local_type (merge_key& key, tree decl, tree fn)
{
  auto_vec<tree, 4> blocks;
  blocks.quick_push (DECL_INITIAL (fn));
  unsigned block_ix = 0;
  while (block_ix != blocks.length ())
    {
      tree block = blocks[block_ix];
      unsigned decl_ix = 0;
      for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
	{
	  /* Only TYPE_DECLs participate in the numbering.  */
	  if (TREE_CODE (var) != TYPE_DECL)
	    continue;
	  if (var == decl)
	    {
	      key.index = (block_ix << 10) | decl_ix;
	      return;
	    }
	  ++decl_ix;
	}
      /* Queue sub-blocks to continue the BFS walk.  */
      for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
	blocks.safe_push (obj: sub);
      ++block_ix;
    }

  /* Not-found value. */
  key.index = 1023;
}
| 11446 | |
| 11447 | /* Look up the local type corresponding at the position encoded by |
| 11448 | KEY within FN and named NAME. */ |
| 11449 | |
/* Look up the local type corresponding at the position encoded by
   KEY within FN and named NAME.  Walks FN's BLOCK tree in the same
   BFS order as the writer; within the matching block, prefers
   matching by identifier (more robust against ODR drift) except for
   anonymous types whose generated names aren't stable, which match
   by position.  Returns NULL_TREE when not found.  */

tree
trees_in::key_local_type (const merge_key& key, tree fn, tree name)
{
  if (!DECL_INITIAL (fn))
    return NULL_TREE;

  /* Unpack the writer's (block_ix << 10) | decl_ix encoding.  */
  const unsigned block_pos = key.index >> 10;
  const unsigned decl_pos = key.index & 1023;

  /* 1023 is the writer's not-found sentinel.  */
  if (decl_pos == 1023)
    return NULL_TREE;

  auto_vec<tree, 4> blocks;
  blocks.quick_push (DECL_INITIAL (fn));
  unsigned block_ix = 0;
  while (block_ix != blocks.length ())
    {
      tree block = blocks[block_ix];
      if (block_ix == block_pos)
	{
	  unsigned decl_ix = 0;
	  for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
	    {
	      if (TREE_CODE (var) != TYPE_DECL)
		continue;
	      /* Prefer using the identifier as the key for more robustness
		 to ODR violations, except for anonymous types since their
		 compiler-generated identifiers aren't stable. */
	      if (IDENTIFIER_ANON_P (name)
		  ? decl_ix == decl_pos
		  : DECL_NAME (var) == name)
		return var;
	      ++decl_ix;
	    }
	  return NULL_TREE;
	}
      for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
	blocks.safe_push (obj: sub);
      ++block_ix;
    }

  return NULL_TREE;
}
| 11493 | |
| 11494 | /* DEP is the depset of some decl we're streaming by value. Determine |
| 11495 | the merging behaviour. */ |
| 11496 | |
/* Determine how DECL (with depset DEP, or none) should be merged by
   importers.  Decls without a DEP are either unique or located via
   their containing class; decls with a DEP are classified by the
   depset's entity kind (partial specialization, ordinary decl, or
   template specialization).  */

merge_kind
trees_out::get_merge_kind (tree decl, depset *dep)
{
  if (!dep)
    {
      if (VAR_OR_FUNCTION_DECL_P (decl))
	{
	  /* Any var or function with template info should have DEP. */
	  gcc_checking_assert (!DECL_LANG_SPECIFIC (decl)
			       || !DECL_TEMPLATE_INFO (decl));
	  if (DECL_LOCAL_DECL_P (decl))
	    return MK_unique;
	}

      /* Either unique, or some member of a class that cannot have an
	 out-of-class definition.  For instance a FIELD_DECL. */
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TREE_CODE (ctx) == FUNCTION_DECL)
	{
	  /* USING_DECLs and NAMESPACE_DECLs cannot have DECL_TEMPLATE_INFO --
	     this isn't permitting them to have one. */
	  gcc_checking_assert (TREE_CODE (decl) == USING_DECL
			       || TREE_CODE (decl) == NAMESPACE_DECL
			       || !DECL_LANG_SPECIFIC (decl)
			       || !DECL_TEMPLATE_INFO (decl));

	  return MK_unique;
	}

      if (TREE_CODE (decl) == TEMPLATE_DECL
	  && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
	return MK_local_friend;

      gcc_checking_assert (TYPE_P (ctx));

      /* Internal-only types will not need to dedup their members. */
      if (!DECL_CONTEXT (TYPE_NAME (ctx)))
	return MK_unique;

      if (TREE_CODE (decl) == USING_DECL)
	return MK_field;

      if (TREE_CODE (decl) == FIELD_DECL)
	{
	  if (DECL_NAME (decl))
	    {
	      /* Anonymous FIELD_DECLs have a NULL name. */
	      gcc_checking_assert (!IDENTIFIER_ANON_P (DECL_NAME (decl)));
	      return MK_named;
	    }

	  if (walking_bit_field_unit)
	    {
	      /* The underlying storage unit for a bitfield.  We do not
		 need to dedup it, because it's only reachable through
		 the bitfields it represents.  And those are deduped. */
	      // FIXME: Is that assertion correct -- do we ever fish it
	      // out and put it in an expr?
	      gcc_checking_assert (!DECL_NAME (decl)
				   && !RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl))
				   && !DECL_BIT_FIELD_REPRESENTATIVE (decl));
	      gcc_checking_assert ((TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
				    ? TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				    : TREE_CODE (TREE_TYPE (decl)))
				   == INTEGER_TYPE);
	      return MK_unique;
	    }

	  /* An anonymous field: located positionally.  */
	  return MK_field;
	}

      if (TREE_CODE (decl) == CONST_DECL)
	return MK_named;

      if (TREE_CODE (decl) == VAR_DECL
	  && DECL_VTABLE_OR_VTT_P (decl))
	return MK_vtable;

      if (DECL_THUNK_P (decl))
	/* Thunks are unique-enough, because they're only referenced
	   from the vtable.  And that's either new (so we want the
	   thunks), or it's a duplicate (so it will be dropped). */
	return MK_unique;

      /* There should be no other cases. */
      gcc_unreachable ();
    }

  gcc_checking_assert (TREE_CODE (decl) != FIELD_DECL
		       && TREE_CODE (decl) != USING_DECL
		       && TREE_CODE (decl) != CONST_DECL);

  if (is_key_order ())
    {
      /* When doing the mergeability graph, there's an indirection to
	 the actual depset. */
      gcc_assert (dep->is_special ());
      dep = dep->deps[0];
    }

  gcc_checking_assert (decl == dep->get_entity ());

  merge_kind mk = MK_named;
  switch (dep->get_entity_kind ())
    {
    default:
      gcc_unreachable ();

    case depset::EK_PARTIAL:
      mk = MK_partial;
      break;

    case depset::EK_DECL:
      {
	tree ctx = CP_DECL_CONTEXT (decl);

	switch (TREE_CODE (ctx))
	  {
	  default:
	    gcc_unreachable ();

	  case FUNCTION_DECL:
	    /* Only local types reach here.  */
	    gcc_checking_assert
	      (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl)));

	    if (has_definition (decl: ctx))
	      mk = MK_local_type;
	    else
	      /* We're not providing a definition of the context to key
		 the local type into; use the keyed map instead. */
	      mk = MK_keyed;
	    break;

	  case RECORD_TYPE:
	  case UNION_TYPE:
	  case NAMESPACE_DECL:
	    if (DECL_NAME (decl) == as_base_identifier)
	      {
		mk = MK_as_base;
		break;
	      }

	    /* A lambda may have a class as its context, even though it
	       isn't a member in the traditional sense; see the test
	       g++.dg/modules/lambda-6_a.C. */
	    if (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl))
		&& LAMBDA_TYPE_P (TREE_TYPE (decl)))
	      {
		if (get_keyed_decl_scope (decl))
		  mk = MK_keyed;
		else
		  /* Lambdas not attached to any mangling scope are TU-local
		     and so cannot be deduplicated. */
		  mk = MK_unique;
		break;
	      }

	    if (TREE_CODE (decl) == TEMPLATE_DECL
		? DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl)
		: decl_specialization_friend_p (decl))
	      {
		mk = MK_local_friend;
		break;
	      }

	    if (DECL_DECOMPOSITION_P (decl))
	      {
		mk = MK_unique;
		break;
	      }

	    if (IDENTIFIER_ANON_P (DECL_NAME (decl)))
	      {
		if (RECORD_OR_UNION_TYPE_P (ctx))
		  mk = MK_field;
		else if (DECL_IMPLICIT_TYPEDEF_P (decl)
			 && UNSCOPED_ENUM_P (TREE_TYPE (decl))
			 && TYPE_VALUES (TREE_TYPE (decl)))
		  /* Keyed by first enum value, and underlying type. */
		  mk = MK_enum;
		else
		  /* No way to merge it, it is an ODR land-mine. */
		  mk = MK_unique;
	      }
	  }
      }
      break;

    case depset::EK_SPECIALIZATION:
      {
	gcc_checking_assert (dep->is_special ());

	if (TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
	  /* Block-scope classes of templates are themselves
	     templates. */
	  gcc_checking_assert (DECL_IMPLICIT_TYPEDEF_P (decl));

	if (dep->is_friend_spec ())
	  mk = MK_friend_spec;
	else if (dep->is_type_spec ())
	  mk = MK_type_spec;
	else
	  mk = MK_decl_spec;

	if (TREE_CODE (decl) == TEMPLATE_DECL)
	  {
	    /* The specialization was of the template's result, not the
	       template itself; note that with a mask bit.  */
	    spec_entry *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);
	    if (TREE_CODE (entry->spec) != TEMPLATE_DECL)
	      mk = merge_kind (mk | MK_tmpl_tmpl_mask);
	  }
      }
      break;
    }

  return mk;
}
| 11713 | |
| 11714 | |
| 11715 | /* The container of DECL -- not necessarily its context! */ |
| 11716 | |
/* Stream the template DECL is instantiated from (or NULL), then
   DECL's container (as a decl, not a type), and return the latter.
   For local friends the container is stashed on DECL_CHAIN rather
   than being the lexical context.  */

tree
trees_out::decl_container (tree decl)
{
  int use_tpl;
  tree tpl = NULL_TREE;
  if (tree template_info = node_template_info (decl, use&: use_tpl))
    tpl = TI_TEMPLATE (template_info);
  /* Don't point at ourselves.  */
  if (tpl == decl)
    tpl = nullptr;

  /* Stream the template we're instantiated from. */
  tree_node (t: tpl);

  tree container = NULL_TREE;
  if (TREE_CODE (decl) == TEMPLATE_DECL
      ? DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl)
      : decl_specialization_friend_p (decl))
    /* Friends record their befriending class on DECL_CHAIN.  */
    container = DECL_CHAIN (decl);
  else
    container = CP_DECL_CONTEXT (decl);

  /* Always stream the container as a decl.  */
  if (TYPE_P (container))
    container = TYPE_NAME (container);

  tree_node (t: container);

  return container;
}
| 11745 | |
| 11746 | tree |
| 11747 | trees_in::decl_container () |
| 11748 | { |
| 11749 | /* The maybe-template. */ |
| 11750 | (void)tree_node (); |
| 11751 | |
| 11752 | tree container = tree_node (); |
| 11753 | |
| 11754 | return container; |
| 11755 | } |
| 11756 | |
| 11757 | /* Gets a 2-bit discriminator to distinguish coroutine actor or destroy |
| 11758 | functions from a normal function. */ |
| 11759 | |
| 11760 | static int |
| 11761 | get_coroutine_discriminator (tree inner) |
| 11762 | { |
| 11763 | if (DECL_COROUTINE_P (inner)) |
| 11764 | if (tree ramp = DECL_RAMP_FN (inner)) |
| 11765 | { |
| 11766 | if (DECL_ACTOR_FN (ramp) == inner) |
| 11767 | return 1; |
| 11768 | else if (DECL_DESTROY_FN (ramp) == inner) |
| 11769 | return 2; |
| 11770 | else |
| 11771 | gcc_unreachable (); |
| 11772 | } |
| 11773 | return 0; |
| 11774 | } |
| 11775 | |
| 11776 | /* Write out key information about a mergeable DEP. Does not write |
| 11777 | the contents of DEP itself. The context has already been |
| 11778 | written. The container has already been streamed. */ |
| 11779 | |
/* Write the merge key for DECL (tagged TAG, merge kind MK, inner
   decl INNER, container CONTAINER, depset DEP).  Specializations
   are keyed by originating template + args; everything else (except
   MK_unique) by a name plus a kind-specific locator packed into a
   single unsigned.  Must mirror the reader's key_mergeable.  */

void
trees_out::key_mergeable (int tag, merge_kind mk, tree decl, tree inner,
			  tree container, depset *dep)
{
  if (dep && is_key_order ())
    {
      /* Key-order walks go via the special depset's indirection.  */
      gcc_checking_assert (dep->is_special ());
      dep = dep->deps[0];
    }

  if (streaming_p ())
    dump (dumper::MERGE)
      && dump ("Writing:%d's %s merge key (%s) %C:%N" , tag, merge_kind_name[mk],
	       dep ? dep->entity_kind_name () : "contained" ,
	       TREE_CODE (decl), decl);

  /* Now write the locating information. */
  if (mk & MK_template_mask)
    {
      /* Specializations are located via their originating template,
	 and the set of template args they specialize. */
      gcc_checking_assert (dep && dep->is_special ());
      spec_entry *entry = reinterpret_cast <spec_entry *> (dep->deps[0]);

      tree_node (t: entry->tmpl);
      tree_node (t: entry->args);
      if (mk & MK_tmpl_decl_mask)
	if (flag_concepts && TREE_CODE (inner) == VAR_DECL)
	  {
	    /* Variable template partial specializations might need
	       constraints (see spec_hasher::equal).  It's simpler to
	       write NULL when we don't need them. */
	    tree constraints = NULL_TREE;

	    if (uses_template_parms (entry->args))
	      constraints = get_constraints (inner);
	    tree_node (t: constraints);
	  }

      if (CHECKING_P)
	{
	  /* Make sure we can locate the decl. */
	  tree existing = match_mergeable_specialization
	    (is_decl: bool (mk & MK_tmpl_decl_mask), entry);

	  gcc_assert (existing);
	  if (mk & MK_tmpl_decl_mask)
	    {
	      if (mk & MK_tmpl_tmpl_mask)
		existing = DECL_TI_TEMPLATE (existing);
	    }
	  else
	    {
	      if (mk & MK_tmpl_tmpl_mask)
		existing = CLASSTYPE_TI_TEMPLATE (existing);
	      else
		existing = TYPE_NAME (existing);
	    }

	  /* The walkabout should have found ourselves. */
	  gcc_checking_assert (TREE_CODE (decl) == TYPE_DECL
			       ? same_type_p (TREE_TYPE (decl),
					      TREE_TYPE (existing))
			       : existing == decl);
	}
    }
  else if (mk != MK_unique)
    {
      merge_key key;
      tree name = DECL_NAME (decl);

      switch (mk)
	{
	default:
	  gcc_unreachable ();

	case MK_named:
	case MK_friend_spec:
	  /* All conversion operators share one identifier; the return
	     type (streamed below) disambiguates.  */
	  if (IDENTIFIER_CONV_OP_P (name))
	    name = conv_op_identifier;

	  if (TREE_CODE (inner) == FUNCTION_DECL)
	    {
	      /* Functions are distinguished by parameter types. */
	      tree fn_type = TREE_TYPE (inner);

	      key.ref_q = type_memfn_rqual (fn_type);
	      key.coro_disc = get_coroutine_discriminator (inner);
	      key.args = TYPE_ARG_TYPES (fn_type);

	      if (tree reqs = get_constraints (inner))
		{
		  if (cxx_dialect < cxx20)
		    reqs = CI_ASSOCIATED_CONSTRAINTS (reqs);
		  else
		    reqs = CI_DECLARATOR_REQS (reqs);
		  key.constraints = reqs;
		}

	      if (IDENTIFIER_CONV_OP_P (name)
		  || (decl != inner
		      && !(name == fun_identifier
			   /* In case the user names something _FUN */
			   && LAMBDA_TYPE_P (DECL_CONTEXT (inner)))))
		/* And a function template, or conversion operator needs
		   the return type.  Except for the _FUN thunk of a
		   generic lambda, which has a recursive decl_type'd
		   return type. */
		// FIXME: What if the return type is a voldemort?
		key.ret = fndecl_declared_return_type (inner);
	    }
	  break;

	case MK_field:
	  {
	    /* Fields are keyed by their position among same-coded
	       fields of the container.  */
	    unsigned ix = 0;
	    if (TREE_CODE (inner) != FIELD_DECL)
	      name = NULL_TREE;
	    else
	      gcc_checking_assert (!name || !IDENTIFIER_ANON_P (name));

	    for (tree field = TYPE_FIELDS (TREE_TYPE (container));
		 ; field = DECL_CHAIN (field))
	      {
		tree finner = STRIP_TEMPLATE (field);
		if (TREE_CODE (finner) == TREE_CODE (inner))
		  {
		    if (finner == inner)
		      break;
		    ix++;
		  }
	      }
	    key.index = ix;
	  }
	  break;

	case MK_vtable:
	  {
	    /* Vtables are keyed by index in CLASSTYPE_VTABLES.  */
	    tree vtable = CLASSTYPE_VTABLES (TREE_TYPE (container));
	    for (unsigned ix = 0; ; vtable = DECL_CHAIN (vtable), ix++)
	      if (vtable == decl)
		{
		  key.index = ix;
		  break;
		}
	    name = NULL_TREE;
	  }
	  break;

	case MK_as_base:
	  /* The as-base subobject type is unique per class; no key
	     data needed beyond the kind.  */
	  gcc_checking_assert
	    (decl == TYPE_NAME (CLASSTYPE_AS_BASE (TREE_TYPE (container))));
	  break;

	case MK_local_friend:
	  {
	    /* Find by index on the class's DECL_LIST.  We set TREE_CHAIN to
	       point to the class in push_template_decl or grokfndecl. */
	    unsigned ix = 0;
	    for (tree decls = CLASSTYPE_DECL_LIST (TREE_CHAIN (decl));
		 decls; decls = TREE_CHAIN (decls))
	      if (!TREE_PURPOSE (decls))
		{
		  tree frnd = friend_from_decl_list (TREE_VALUE (decls));
		  if (frnd == decl)
		    break;
		  ix++;
		}
	    key.index = ix;
	    name = NULL_TREE;
	  }
	  break;

	case MK_local_type:
	  /* Positional key within the containing function's BLOCKs.  */
	  key_local_type (key, STRIP_TEMPLATE (decl), fn: container);
	  break;

	case MK_enum:
	  {
	    /* Anonymous enums are located by their first identifier,
	       and underlying type. */
	    tree type = TREE_TYPE (decl);

	    gcc_checking_assert (UNSCOPED_ENUM_P (type));
	    /* Using the type name drops the bit precision we might
	       have been using on the enum. */
	    key.ret = TYPE_NAME (ENUM_UNDERLYING_TYPE (type));
	    if (tree values = TYPE_VALUES (type))
	      name = DECL_NAME (TREE_VALUE (values));
	  }
	  break;

	case MK_keyed:
	  {
	    /* Keyed entities are located by index in the table entry
	       of the decl they're keyed to.  */
	    tree scope = get_keyed_decl_scope (inner);
	    gcc_checking_assert (scope);

	    auto *root = keyed_table->get (k: scope);
	    unsigned ix = root->length ();
	    /* If we don't find it, we'll write a really big number
	       that the reader will ignore. */
	    while (ix--)
	      if ((*root)[ix] == inner)
		break;

	    /* Use the keyed-to decl as the 'name'. */
	    name = scope;
	    key.index = ix;
	  }
	  break;

	case MK_partial:
	  {
	    /* Partial specializations: primary template, args and
	       constraints.  */
	    tree ti = get_template_info (inner);
	    key.constraints = get_constraints (inner);
	    key.ret = TI_TEMPLATE (ti);
	    key.args = TI_ARGS (ti);
	  }
	  break;
	}

      tree_node (t: name);
      if (streaming_p ())
	{
	  /* Check we have enough bits for the index. */
	  gcc_checking_assert (key.index < (1u << (sizeof (unsigned) * 8 - 4)));

	  /* Pack ref qualifier (2 bits), coroutine discriminator
	     (2 bits) and index into one word.  */
	  unsigned code = ((key.ref_q << 0)
			   | (key.coro_disc << 2)
			   | (key.index << 4));
	  u (v: code);
	}

      if (mk == MK_enum)
	tree_node (t: key.ret);
      else if (mk == MK_partial
	       || (mk == MK_named && inner
		   && TREE_CODE (inner) == FUNCTION_DECL))
	{
	  tree_node (t: key.ret);
	  tree arg = key.args;
	  if (mk == MK_named)
	    /* Stream parm types individually, terminated by the
	       (possibly void_list_node) tail.  */
	    while (arg && arg != void_list_node)
	      {
		tree_node (TREE_VALUE (arg));
		arg = TREE_CHAIN (arg);
	      }
	  tree_node (t: arg);
	  tree_node (t: key.constraints);
	}
    }
}
| 12032 | |
| 12033 | /* DECL is a new declaration that may be duplicated in OVL. Use KEY |
| 12034 | to find its clone, or NULL. If DECL's DECL_NAME is NULL, this |
| 12035 | has been found by a proxy. It will be an enum type located by its |
| 12036 | first member. |
| 12037 | |
| 12038 | We're conservative with matches, so ambiguous decls will be |
| 12039 | registered as different, then lead to a lookup error if the two |
| 12040 | modules are both visible. Perhaps we want to do something similar |
| 12041 | to duplicate decls to get ODR errors on loading? We already have |
| 12042 | some special casing for namespaces. */ |
| 12043 | |
/* Scan overload set OVL for an existing decl matching DECL under
   merge kind MK and merge key KEY.  Returns the match or NULL_TREE.
   Matching is deliberately conservative: ambiguous cases register
   as distinct decls.  */

static tree
check_mergeable_decl (merge_kind mk, tree decl, tree ovl, merge_key const &key)
{
  tree found = NULL_TREE;
  for (ovl_iterator iter (ovl); !found && iter; ++iter)
    {
      tree match = *iter;

      tree d_inner = decl;
      tree m_inner = match;

    again:
      if (TREE_CODE (d_inner) != TREE_CODE (m_inner))
	{
	  if (TREE_CODE (match) == NAMESPACE_DECL
	      && !DECL_NAMESPACE_ALIAS (match))
	    /* Namespaces are never overloaded. */
	    found = match;

	  continue;
	}

      switch (TREE_CODE (d_inner))
	{
	case TEMPLATE_DECL:
	  /* Compare template heads, then recurse (via the goto) on
	     the template results.  */
	  if (template_heads_equivalent_p (d_inner, m_inner))
	    {
	      d_inner = DECL_TEMPLATE_RESULT (d_inner);
	      m_inner = DECL_TEMPLATE_RESULT (m_inner);
	      if (d_inner == error_mark_node
		  && TYPE_DECL_ALIAS_P (m_inner))
		{
		  found = match;
		  break;
		}
	      goto again;
	    }
	  break;

	case FUNCTION_DECL:
	  /* Match on return type (when keyed), cv/ref qualifiers,
	     parameter types, coroutine discriminator, builtin-ness
	     and contracts role, then constraints.  */
	  if (tree m_type = TREE_TYPE (m_inner))
	    if ((!key.ret
		 || same_type_p (key.ret, fndecl_declared_return_type (m_inner)))
		&& type_memfn_rqual (m_type) == key.ref_q
		&& compparms (key.args, TYPE_ARG_TYPES (m_type))
		&& get_coroutine_discriminator (inner: m_inner) == key.coro_disc
		/* Reject if old is a "C" builtin and new is not "C".
		   Matches decls_match behaviour. */
		&& (!DECL_IS_UNDECLARED_BUILTIN (m_inner)
		    || !DECL_EXTERN_C_P (m_inner)
		    || DECL_EXTERN_C_P (d_inner))
		/* Reject if one is a different member of a
		   guarded/pre/post fn set. */
		&& (!flag_contracts
		    || (DECL_IS_PRE_FN_P (d_inner)
			== DECL_IS_PRE_FN_P (m_inner)))
		&& (!flag_contracts
		    || (DECL_IS_POST_FN_P (d_inner)
			== DECL_IS_POST_FN_P (m_inner))))
	      {
		tree m_reqs = get_constraints (m_inner);
		if (m_reqs)
		  {
		    if (cxx_dialect < cxx20)
		      m_reqs = CI_ASSOCIATED_CONSTRAINTS (m_reqs);
		    else
		      m_reqs = CI_DECLARATOR_REQS (m_reqs);
		  }

		if (cp_tree_equal (key.constraints, m_reqs))
		  found = match;
	      }
	  break;

	case TYPE_DECL:
	  if (DECL_IMPLICIT_TYPEDEF_P (d_inner)
	      == DECL_IMPLICIT_TYPEDEF_P (m_inner))
	    {
	      if (!IDENTIFIER_ANON_P (DECL_NAME (m_inner)))
		return match;
	      else if (mk == MK_enum
		       && (TYPE_NAME (ENUM_UNDERLYING_TYPE (TREE_TYPE (m_inner)))
			   == key.ret))
		/* Anonymous enum: match on underlying type (the name
		   was already matched via the first enumerator).  */
		found = match;
	    }
	  break;

	default:
	  /* Anything else matches by name alone.  */
	  found = match;
	  break;
	}
    }

  return found;
}
| 12139 | |
/* DECL, INNER & TYPE are a skeleton set of nodes for a decl.  Only
   the bools have been filled in.  Read its merging key and merge it.
   Returns the existing decl if there is one, NULL_TREE for a genuinely
   new decl, or error_mark_node on stream overrun.

   TAG is the back-reference tag (used only for dumping).  MK says how
   the decl was keyed when written.  CONTAINER is the decl's already-read
   context.  IS_ATTACHED is whether it is attached to a named module;
   IS_IMPORTED_TEMPLOID_FRIEND whether it is a temploid friend from
   another module.  */

tree
trees_in::key_mergeable (int tag, merge_kind mk, tree decl, tree inner,
			 tree type, tree container, bool is_attached,
			 bool is_imported_temploid_friend)
{
  const char *kind = "new" ;
  tree existing = NULL_TREE;

  if (mk & MK_template_mask)
    {
      /* A specialization: keyed by the (template, args) pair, looked
	 up in the specialization tables.  */
      // FIXME: We could stream the specialization hash?
      spec_entry spec;
      spec.tmpl = tree_node ();
      spec.args = tree_node ();

      if (get_overrun ())
	return error_mark_node;

      DECL_NAME (decl) = DECL_NAME (spec.tmpl);
      DECL_CONTEXT (decl) = DECL_CONTEXT (spec.tmpl);
      DECL_NAME (inner) = DECL_NAME (decl);
      DECL_CONTEXT (inner) = DECL_CONTEXT (decl);

      tree constr = NULL_TREE;
      bool is_decl = mk & MK_tmpl_decl_mask;
      if (is_decl)
	{
	  /* Variable template specializations hash their constraints
	     into the lookup, so temporarily install them.  */
	  if (flag_concepts && TREE_CODE (inner) == VAR_DECL)
	    {
	      constr = tree_node ();
	      if (constr)
		set_constraints (inner, constr);
	    }
	  spec.spec = (mk & MK_tmpl_tmpl_mask) ? inner : decl;
	}
      else
	spec.spec = type;
      existing = match_mergeable_specialization (is_decl, &spec);
      if (constr)
	/* We'll add these back later, if this is the new decl.  */
	remove_constraints (inner);

      if (!existing)
	; /* We'll add to the table once read.  */
      else if (mk & MK_tmpl_decl_mask)
	{
	  /* A declaration specialization.  */
	  if (mk & MK_tmpl_tmpl_mask)
	    existing = DECL_TI_TEMPLATE (existing);
	}
      else
	{
	  /* A type specialization.  */
	  if (mk & MK_tmpl_tmpl_mask)
	    existing = CLASSTYPE_TI_TEMPLATE (existing);
	  else
	    existing = TYPE_NAME (existing);
	}
    }
  else if (mk == MK_unique)
    /* Known to be unique; no merging needed.  */
    kind = "unique" ;
  else
    {
      /* Keyed by name (and, for functions, signature) within
	 CONTAINER.  */
      tree name = tree_node ();

      merge_key key;
      /* Ref-qualifier (2 bits), coroutine discriminator (2 bits) and
	 index are packed into a single word by the writer.  */
      unsigned code = u ();
      key.ref_q = cp_ref_qualifier ((code >> 0) & 3);
      key.coro_disc = (code >> 2) & 3;
      key.index = code >> 4;

      if (mk == MK_enum)
	key.ret = tree_node ();
      else if (mk == MK_partial
	       || ((mk == MK_named || mk == MK_friend_spec)
		   && TREE_CODE (inner) == FUNCTION_DECL))
	{
	  /* Read return type, argument-type list (terminated by
	     void_list_node or NULL), and constraints.  For MK_partial
	     the first "arg" is really the specialization args.  */
	  key.ret = tree_node ();
	  tree arg, *arg_ptr = &key.args;
	  while ((arg = tree_node ())
		 && arg != void_list_node
		 && mk != MK_partial)
	    {
	      *arg_ptr = tree_cons (NULL_TREE, arg, NULL_TREE);
	      arg_ptr = &TREE_CHAIN (*arg_ptr);
	    }
	  *arg_ptr = arg;
	  key.constraints = tree_node ();
	}

      if (get_overrun ())
	return error_mark_node;

      if (mk < MK_indirect_lwm)
	{
	  DECL_NAME (decl) = name;
	  DECL_CONTEXT (decl) = FROB_CONTEXT (container);
	}
      DECL_NAME (inner) = DECL_NAME (decl);
      DECL_CONTEXT (inner) = DECL_CONTEXT (decl);

      if (mk == MK_partial)
	{
	  /* Search the template's partial specializations for one
	     with matching args and constraints.  */
	  for (tree spec = DECL_TEMPLATE_SPECIALIZATIONS (key.ret);
	       spec; spec = TREE_CHAIN (spec))
	    {
	      tree tmpl = TREE_VALUE (spec);
	      tree ti = get_template_info (tmpl);
	      if (template_args_equal (key.args, TI_ARGS (ti))
		  && cp_tree_equal (key.constraints,
				    get_constraints
				    (DECL_TEMPLATE_RESULT (tmpl))))
		{
		  existing = tmpl;
		  break;
		}
	    }
	}
      else if (mk == MK_keyed
	       && DECL_LANG_SPECIFIC (name)
	       && DECL_MODULE_KEYED_DECLS_P (name))
	{
	  /* Keyed to another decl (NAME); find it by index in that
	     decl's keyed-decls vector.  */
	  gcc_checking_assert (TREE_CODE (container) == NAMESPACE_DECL
			       || TREE_CODE (container) == TYPE_DECL
			       || TREE_CODE (container) == FUNCTION_DECL);
	  if (auto *set = keyed_table->get (k: name))
	    if (key.index < set->length ())
	      {
		existing = (*set)[key.index];
		if (existing)
		  {
		    gcc_checking_assert
		      (DECL_IMPLICIT_TYPEDEF_P (existing));
		    /* INNER != DECL means we want the template, not
		       the implicit typedef.  */
		    if (inner != decl)
		      existing
			= CLASSTYPE_TI_TEMPLATE (TREE_TYPE (existing));
		  }
	      }
	}
      else
	switch (TREE_CODE (container))
	  {
	  default:
	    gcc_unreachable ();

	  case NAMESPACE_DECL:
	    /* Attached decls only merge within their own module (or
	       partitions thereof).  */
	    if (is_attached
		&& !is_imported_temploid_friend
		&& !(state->is_module () || state->is_partition ()))
	      kind = "unique" ;
	    else
	      {
		gcc_checking_assert (mk == MK_named || mk == MK_enum);
		tree mvec;
		tree *vslot = mergeable_namespace_slots (ns: container, name,
							 is_attached, mvec: &mvec);
		existing = check_mergeable_decl (mk, decl, ovl: *vslot, key);
		if (!existing)
		  add_mergeable_namespace_entity (slot: vslot, decl);
		else
		  {
		    /* Note that we now have duplicates to deal with in
		       name lookup.  */
		    if (is_attached)
		      BINDING_VECTOR_PARTITION_DUPS_P (mvec) = true;
		    else
		      BINDING_VECTOR_GLOBAL_DUPS_P (mvec) = true;
		  }
	      }
	    break;

	  case FUNCTION_DECL:
	    /* A type local to a function body.  */
	    gcc_checking_assert (mk == MK_local_type);
	    existing = key_local_type (key, fn: container, name);
	    if (existing && inner != decl)
	      existing = TYPE_TI_TEMPLATE (TREE_TYPE (existing));
	    break;

	  case TYPE_DECL:
	    gcc_checking_assert (!is_imported_temploid_friend);
	    if (is_attached && !(state->is_module () || state->is_partition ())
		/* Implicit member functions can come from
		   anywhere.  */
		&& !(DECL_ARTIFICIAL (decl)
		     && TREE_CODE (decl) == FUNCTION_DECL
		     && !DECL_THUNK_P (decl)))
	      kind = "unique" ;
	    else
	      {
		tree ctx = TREE_TYPE (container);

		/* For some reason templated enumeral types are not marked
		   as COMPLETE_TYPE_P, even though they have members.
		   This may well be a bug elsewhere.  */
		if (TREE_CODE (ctx) == ENUMERAL_TYPE)
		  existing = find_enum_member (ctx, name);
		else if (COMPLETE_TYPE_P (ctx))
		  {
		    switch (mk)
		      {
		      default:
			gcc_unreachable ();

		      case MK_named:
			existing = lookup_class_binding (ctx, name);
			if (existing)
			  {
			    /* Compare non-member-template templates by
			       their templated result.  */
			    tree inner = decl;
			    if (TREE_CODE (inner) == TEMPLATE_DECL
				&& !DECL_MEMBER_TEMPLATE_P (inner))
			      inner = DECL_TEMPLATE_RESULT (inner);

			    existing = check_mergeable_decl
			      (mk, decl: inner, ovl: existing, key);

			    if (!existing && DECL_ALIAS_TEMPLATE_P (decl))
			      {} // FIXME: Insert into specialization
			    // tables, we'll need the arguments for that!
			  }
			break;

		      case MK_field:
			{
			  /* KEY.index is the ordinal of same-code
			     fields on the TYPE_FIELDS chain.  */
			  unsigned ix = key.index;
			  for (tree field = TYPE_FIELDS (ctx);
			       field; field = DECL_CHAIN (field))
			    {
			      tree finner = STRIP_TEMPLATE (field);
			      if (TREE_CODE (finner) == TREE_CODE (inner))
				if (!ix--)
				  {
				    existing = field;
				    break;
				  }
			    }
			}
			break;

		      case MK_vtable:
			{
			  /* KEY.index is the ordinal on the vtable
			     chain.  */
			  unsigned ix = key.index;
			  for (tree vtable = CLASSTYPE_VTABLES (ctx);
			       vtable; vtable = DECL_CHAIN (vtable))
			    if (!ix--)
			      {
				existing = vtable;
				break;
			      }
			}
			break;

		      case MK_as_base:
			{
			  /* The as-base pseudo-type of CTX itself.  */
			  tree as_base = CLASSTYPE_AS_BASE (ctx);
			  if (as_base && as_base != ctx)
			    existing = TYPE_NAME (as_base);
			}
			break;

		      case MK_local_friend:
			{
			  /* KEY.index is the ordinal of friend entries
			     (TREE_PURPOSE is NULL) on the decl list.  */
			  unsigned ix = key.index;
			  for (tree decls = CLASSTYPE_DECL_LIST (ctx);
			       decls; decls = TREE_CHAIN (decls))
			    if (!TREE_PURPOSE (decls) && !ix--)
			      {
				existing
				  = friend_from_decl_list (TREE_VALUE (decls));
				break;
			      }
			}
			break;
		      }

		    /* We matched the templated result above; hoist
		       back up to its TEMPLATE_DECL.  */
		    if (existing && mk < MK_indirect_lwm && mk != MK_partial
			&& TREE_CODE (decl) == TEMPLATE_DECL
			&& !DECL_MEMBER_TEMPLATE_P (decl))
		      {
			tree ti;
			if (DECL_IMPLICIT_TYPEDEF_P (existing))
			  ti = TYPE_TEMPLATE_INFO (TREE_TYPE (existing));
			else
			  ti = DECL_TEMPLATE_INFO (existing);
			existing = TI_TEMPLATE (ti);
		      }
		  }
	      }
	  }
    }

  dump (dumper::MERGE)
    && dump ("Read:%d's %s merge key (%s) %C:%N" , tag, merge_kind_name[mk],
	     existing ? "matched" : kind, TREE_CODE (decl), decl);

  return existing;
}
| 12440 | |
| 12441 | void |
| 12442 | trees_out::binfo_mergeable (tree binfo) |
| 12443 | { |
| 12444 | tree dom = binfo; |
| 12445 | while (tree parent = BINFO_INHERITANCE_CHAIN (dom)) |
| 12446 | dom = parent; |
| 12447 | tree type = BINFO_TYPE (dom); |
| 12448 | gcc_checking_assert (TYPE_BINFO (type) == dom); |
| 12449 | tree_node (t: type); |
| 12450 | if (streaming_p ()) |
| 12451 | { |
| 12452 | unsigned ix = 0; |
| 12453 | for (; dom != binfo; dom = TREE_CHAIN (dom)) |
| 12454 | ix++; |
| 12455 | u (v: ix); |
| 12456 | } |
| 12457 | } |
| 12458 | |
| 12459 | unsigned |
| 12460 | trees_in::binfo_mergeable (tree *type) |
| 12461 | { |
| 12462 | *type = tree_node (); |
| 12463 | return u (); |
| 12464 | } |
| 12465 | |
| 12466 | /* DECL is a just streamed declaration with attributes DATTR that should |
| 12467 | have matching ABI tags as EXISTING's attributes EATTR. Check that the |
| 12468 | ABI tags match, and report an error if not. */ |
| 12469 | |
| 12470 | void |
| 12471 | trees_in::check_abi_tags (tree existing, tree decl, tree &eattr, tree &dattr) |
| 12472 | { |
| 12473 | tree etags = lookup_attribute (attr_name: "abi_tag" , list: eattr); |
| 12474 | tree dtags = lookup_attribute (attr_name: "abi_tag" , list: dattr); |
| 12475 | if ((etags == nullptr) != (dtags == nullptr) |
| 12476 | || (etags && !attribute_value_equal (etags, dtags))) |
| 12477 | { |
| 12478 | if (etags) |
| 12479 | etags = TREE_VALUE (etags); |
| 12480 | if (dtags) |
| 12481 | dtags = TREE_VALUE (dtags); |
| 12482 | |
| 12483 | /* We only error if mangling wouldn't consider the tags equivalent. */ |
| 12484 | if (!equal_abi_tags (etags, dtags)) |
| 12485 | { |
| 12486 | auto_diagnostic_group d; |
| 12487 | if (dtags) |
| 12488 | error_at (DECL_SOURCE_LOCATION (decl), |
| 12489 | "mismatching abi tags for %qD with tags %qE" , |
| 12490 | decl, dtags); |
| 12491 | else |
| 12492 | error_at (DECL_SOURCE_LOCATION (decl), |
| 12493 | "mismatching abi tags for %qD with no tags" , decl); |
| 12494 | if (etags) |
| 12495 | inform (DECL_SOURCE_LOCATION (existing), |
| 12496 | "existing declaration here with tags %qE" , etags); |
| 12497 | else |
| 12498 | inform (DECL_SOURCE_LOCATION (existing), |
| 12499 | "existing declaration here with no tags" ); |
| 12500 | } |
| 12501 | |
| 12502 | /* Always use the existing abi_tags as the canonical set so that |
| 12503 | later processing doesn't get confused. */ |
| 12504 | if (dtags) |
| 12505 | dattr = remove_attribute ("abi_tag" , dattr); |
| 12506 | if (etags) |
| 12507 | duplicate_one_attribute (&dattr, eattr, "abi_tag" ); |
| 12508 | } |
| 12509 | } |
| 12510 | |
/* DECL is a just streamed mergeable decl that should match EXISTING.  Check
   it does and issue an appropriate diagnostic if not.  Merge any
   bits from DECL to EXISTING.  This is stricter matching than
   decls_match, because we can rely on ODR-sameness, and we cannot use
   decls_match because it can cause instantiations of constraints.
   IS_TYPEDEF is whether DECL is a (non-implicit) typedef.  Returns
   false on a diagnosed mismatch.  */

bool
trees_in::is_matching_decl (tree existing, tree decl, bool is_typedef)
{
  // FIXME: We should probably do some duplicate decl-like stuff here
  // (beware, default parms should be the same?)  Can we just call
  // duplicate_decls and teach it how to handle the module-specific
  // permitted/required duplications?

  // We know at this point that the decls have matched by key, so we
  // can elide some of the checking
  gcc_checking_assert (TREE_CODE (existing) == TREE_CODE (decl));

  /* Compare the templated decls for TEMPLATE_DECLs.  */
  tree d_inner = decl;
  tree e_inner = existing;
  if (TREE_CODE (decl) == TEMPLATE_DECL)
    {
      d_inner = DECL_TEMPLATE_RESULT (d_inner);
      e_inner = DECL_TEMPLATE_RESULT (e_inner);
      gcc_checking_assert (TREE_CODE (e_inner) == TREE_CODE (d_inner));
    }

  // FIXME: do more precise errors at point of mismatch
  const char *mismatch_msg = nullptr;

  if (VAR_OR_FUNCTION_DECL_P (d_inner)
      && DECL_EXTERN_C_P (d_inner) != DECL_EXTERN_C_P (e_inner))
    {
      mismatch_msg = G_("conflicting language linkage for imported "
			"declaration %#qD" );
      goto mismatch;
    }
  else if (TREE_CODE (d_inner) == FUNCTION_DECL)
    {
      /* Compare declared return types.  */
      tree e_ret = fndecl_declared_return_type (existing);
      tree d_ret = fndecl_declared_return_type (decl);

      if (decl != d_inner && DECL_NAME (d_inner) == fun_identifier
	  && LAMBDA_TYPE_P (DECL_CONTEXT (d_inner)))
	/* This has a recursive type that will compare different.  */;
      else if (!same_type_p (d_ret, e_ret))
	{
	  mismatch_msg = G_("conflicting type for imported declaration %#qD" );
	  goto mismatch;
	}

      /* Compare parameter lists, element-wise.  */
      tree e_type = TREE_TYPE (e_inner);
      tree d_type = TREE_TYPE (d_inner);

      for (tree e_args = TYPE_ARG_TYPES (e_type),
	     d_args = TYPE_ARG_TYPES (d_type);
	   e_args != d_args && (e_args || d_args);
	   e_args = TREE_CHAIN (e_args), d_args = TREE_CHAIN (d_args))
	{
	  /* One list ran out before the other: different arity.  */
	  if (!(e_args && d_args))
	    {
	      mismatch_msg = G_("conflicting argument list for imported "
				"declaration %#qD" );
	      goto mismatch;
	    }

	  if (!same_type_p (TREE_VALUE (d_args), TREE_VALUE (e_args)))
	    {
	      mismatch_msg = G_("conflicting argument types for imported "
				"declaration %#qD" );
	      goto mismatch;
	    }
	}

      /* If EXISTING has an undeduced or uninstantiated exception
	 specification, but DECL does not, propagate the exception
	 specification.  Otherwise we end up asserting or trying to
	 instantiate it in the middle of loading.  */
      tree e_spec = TYPE_RAISES_EXCEPTIONS (e_type);
      tree d_spec = TYPE_RAISES_EXCEPTIONS (d_type);
      if (DECL_MAYBE_DELETED (e_inner) || DEFERRED_NOEXCEPT_SPEC_P (e_spec))
	{
	  if (!DEFERRED_NOEXCEPT_SPEC_P (d_spec)
	      || (UNEVALUATED_NOEXCEPT_SPEC_P (e_spec)
		  && !UNEVALUATED_NOEXCEPT_SPEC_P (d_spec)))
	    {
	      dump (dumper::MERGE)
		&& dump ("Propagating instantiated noexcept to %N" , existing);
	      TREE_TYPE (existing) = d_type;

	      /* Propagate to existing clones.  */
	      tree clone;
	      FOR_EACH_CLONE (clone, existing)
		{
		  if (TREE_TYPE (clone) == e_type)
		    TREE_TYPE (clone) = d_type;
		  else
		    TREE_TYPE (clone)
		      = build_exception_variant (TREE_TYPE (clone), d_spec);
		}
	    }
	}
      else if (!DECL_MAYBE_DELETED (d_inner)
	       && !DEFERRED_NOEXCEPT_SPEC_P (d_spec)
	       && !comp_except_specs (d_spec, e_spec, ce_type))
	{
	  mismatch_msg = G_("conflicting %<noexcept%> specifier for "
			    "imported declaration %#qD" );
	  goto mismatch;
	}

      /* Similarly if EXISTING has an undeduced return type, but DECL's
	 is already deduced.  */
      if (undeduced_auto_decl (existing) && !undeduced_auto_decl (decl))
	{
	  dump (dumper::MERGE)
	    && dump ("Propagating deduced return type to %N" , existing);
	  gcc_checking_assert (existing == e_inner);
	  FNDECL_USED_AUTO (existing) = true;
	  DECL_SAVED_AUTO_RETURN_TYPE (existing) = TREE_TYPE (e_type);
	  TREE_TYPE (existing) = change_return_type (TREE_TYPE (d_type), e_type);
	}
      else if (type_uses_auto (d_ret)
	       && !same_type_p (TREE_TYPE (d_type), TREE_TYPE (e_type)))
	{
	  mismatch_msg = G_("conflicting deduced return type for "
			    "imported declaration %#qD" );
	  goto mismatch;
	}

      /* Similarly if EXISTING has undeduced constexpr, but DECL's
	 is already deduced.  */
      if (DECL_DECLARED_CONSTEXPR_P (e_inner)
	  == DECL_DECLARED_CONSTEXPR_P (d_inner))
	/* Already matches.  */;
      else if (DECL_DECLARED_CONSTEXPR_P (d_inner)
	       && (DECL_MAYBE_DELETED (e_inner)
		   || decl_implicit_constexpr_p (d_inner)))
	/* DECL was deduced, copy to EXISTING.  */
	{
	  DECL_DECLARED_CONSTEXPR_P (e_inner) = true;
	  if (decl_implicit_constexpr_p (d_inner))
	    DECL_LANG_SPECIFIC (e_inner)->u.fn.implicit_constexpr = true;
	}
      else if (DECL_DECLARED_CONSTEXPR_P (e_inner)
	       && (DECL_MAYBE_DELETED (d_inner)
		   || decl_implicit_constexpr_p (e_inner)))
	/* EXISTING was deduced, leave it alone.  */;
      else
	{
	  mismatch_msg = G_("conflicting %<constexpr%> for imported "
			    "declaration %#qD" );
	  goto mismatch;
	}

      /* Don't synthesize a defaulted function if we're importing one
	 we've already determined.  */
      if (!DECL_MAYBE_DELETED (d_inner))
	DECL_MAYBE_DELETED (e_inner) = false;
    }
  else if (is_typedef)
    {
      if (!DECL_ORIGINAL_TYPE (e_inner)
	  || !same_type_p (DECL_ORIGINAL_TYPE (d_inner),
			   DECL_ORIGINAL_TYPE (e_inner)))
	{
	  mismatch_msg = G_("conflicting imported declaration %q#D" );
	  goto mismatch;
	}
    }
  /* Using cp_tree_equal because we can meet TYPE_ARGUMENT_PACKs
     here.  I suspect the entities that directly do that are things
     that shouldn't go to duplicate_decls (FIELD_DECLs etc).  */
  else if (!cp_tree_equal (TREE_TYPE (decl), TREE_TYPE (existing)))
    {
      mismatch_msg = G_("conflicting type for imported declaration %#qD" );
    mismatch:
      if (DECL_IS_UNDECLARED_BUILTIN (existing))
	/* Just like duplicate_decls, presume the user knows what
	   they're doing in overriding a builtin.  */
	TREE_TYPE (existing) = TREE_TYPE (decl);
      else if (decl_function_context (decl))
	/* The type of a mergeable local entity (such as a function scope
	   capturing lambda's closure type fields) can depend on an
	   unmergeable local entity (such as a local variable), so type
	   equality isn't feasible in general for local entities.  */;
      else
	{
	  gcc_checking_assert (mismatch_msg);
	  auto_diagnostic_group d;
	  error_at (DECL_SOURCE_LOCATION (decl), mismatch_msg, decl);
	  inform (DECL_SOURCE_LOCATION (existing),
		  "existing declaration %#qD" , existing);
	  return false;
	}
    }

  if (DECL_IS_UNDECLARED_BUILTIN (existing)
      && !DECL_IS_UNDECLARED_BUILTIN (decl))
    {
      /* We're matching a builtin that the user has yet to declare.
	 We are the one!  This is very much duplicate-decl
	 shenanigans.  */
      DECL_SOURCE_LOCATION (existing) = DECL_SOURCE_LOCATION (decl);
      if (TREE_CODE (decl) != TYPE_DECL)
	{
	  /* Propagate exceptions etc.  */
	  TREE_TYPE (existing) = TREE_TYPE (decl);
	  TREE_NOTHROW (existing) = TREE_NOTHROW (decl);
	}
      /* This is actually an import!  */
      DECL_MODULE_IMPORT_P (existing) = true;

      /* Yay, sliced!  */
      existing->base = decl->base;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  /* Ew :(  Copy the decl_common tail and the builtin
	     machinery wholesale.  */
	  memcpy (dest: &existing->decl_common.size,
		  src: &decl->decl_common.size,
		  n: (offsetof (tree_decl_common, pt_uid)
		     - offsetof (tree_decl_common, size)));
	  auto bltin_class = DECL_BUILT_IN_CLASS (decl);
	  existing->function_decl.built_in_class = bltin_class;
	  auto fncode = DECL_UNCHECKED_FUNCTION_CODE (decl);
	  DECL_UNCHECKED_FUNCTION_CODE (existing) = fncode;
	  if (existing->function_decl.built_in_class == BUILT_IN_NORMAL)
	    {
	      if (builtin_decl_explicit_p (fncode: built_in_function (fncode)))
		switch (fncode)
		  {
		  case BUILT_IN_STPCPY:
		    /* NOTE(review): presumably stpcpy is special-cased
		       because it is only implicitly available — confirm.  */
		    set_builtin_decl_implicit_p
		      (fncode: built_in_function (fncode), implicit_p: true);
		    break;
		  default:
		    set_builtin_decl_declared_p
		      (fncode: built_in_function (fncode), declared_p: true);
		    break;
		  }
	      copy_attributes_to_builtin (decl);
	    }
	}
    }

  if (VAR_OR_FUNCTION_DECL_P (decl)
      && DECL_TEMPLATE_INSTANTIATED (decl))
    /* Don't instantiate again!  */
    DECL_TEMPLATE_INSTANTIATED (existing) = true;

  /* Propagate inlineness, and the gnu_inline attribute if the
     existing decl has no body yet.  */
  if (TREE_CODE (d_inner) == FUNCTION_DECL
      && DECL_DECLARED_INLINE_P (d_inner))
    {
      DECL_DECLARED_INLINE_P (e_inner) = true;
      if (!DECL_SAVED_TREE (e_inner)
	  && lookup_attribute (attr_name: "gnu_inline" , DECL_ATTRIBUTES (d_inner))
	  && !lookup_attribute (attr_name: "gnu_inline" , DECL_ATTRIBUTES (e_inner)))
	{
	  DECL_INTERFACE_KNOWN (e_inner)
	    |= DECL_INTERFACE_KNOWN (d_inner);
	  DECL_DISREGARD_INLINE_LIMITS (e_inner)
	    |= DECL_DISREGARD_INLINE_LIMITS (d_inner);
	  // TODO: we will eventually want to merge all decl attributes
	  duplicate_one_attribute (&DECL_ATTRIBUTES (e_inner),
				   DECL_ATTRIBUTES (d_inner), "gnu_inline" );
	}
    }
  /* A definition anywhere means the entity is defined.  */
  if (!DECL_EXTERNAL (d_inner))
    DECL_EXTERNAL (e_inner) = false;

  if (VAR_OR_FUNCTION_DECL_P (d_inner))
    check_abi_tags (existing, decl,
		    DECL_ATTRIBUTES (e_inner), DECL_ATTRIBUTES (d_inner));

  if (TREE_CODE (decl) == TEMPLATE_DECL)
    {
      /* Merge default template arguments.  */
      tree d_parms = DECL_INNERMOST_TEMPLATE_PARMS (decl);
      tree e_parms = DECL_INNERMOST_TEMPLATE_PARMS (existing);
      gcc_checking_assert (TREE_VEC_LENGTH (d_parms)
			   == TREE_VEC_LENGTH (e_parms));
      for (int i = 0; i < TREE_VEC_LENGTH (d_parms); ++i)
	{
	  tree d_default = TREE_PURPOSE (TREE_VEC_ELT (d_parms, i));
	  tree& e_default = TREE_PURPOSE (TREE_VEC_ELT (e_parms, i));
	  if (e_default == NULL_TREE)
	    e_default = d_default;
	  else if (d_default != NULL_TREE
		   && !cp_tree_equal (d_default, e_default))
	    {
	      auto_diagnostic_group d;
	      tree d_parm = TREE_VALUE (TREE_VEC_ELT (d_parms, i));
	      tree e_parm = TREE_VALUE (TREE_VEC_ELT (e_parms, i));
	      error_at (DECL_SOURCE_LOCATION (d_parm),
			"conflicting default argument for %#qD" , d_parm);
	      inform (DECL_SOURCE_LOCATION (e_parm),
		      "existing default declared here" );
	      return false;
	    }
	}
    }

  if (TREE_CODE (d_inner) == FUNCTION_DECL)
    {
      /* Merge default function arguments.  */
      tree d_parm = FUNCTION_FIRST_USER_PARMTYPE (d_inner);
      tree e_parm = FUNCTION_FIRST_USER_PARMTYPE (e_inner);
      int i = 0;
      for (; d_parm && d_parm != void_list_node;
	   d_parm = TREE_CHAIN (d_parm), e_parm = TREE_CHAIN (e_parm), ++i)
	{
	  tree d_default = TREE_PURPOSE (d_parm);
	  tree& e_default = TREE_PURPOSE (e_parm);
	  if (e_default == NULL_TREE)
	    e_default = d_default;
	  else if (d_default != NULL_TREE
		   && !cp_tree_equal (d_default, e_default))
	    {
	      auto_diagnostic_group d;
	      error_at (get_fndecl_argument_location (d_inner, i),
			"conflicting default argument for parameter %P of %#qD" ,
			i, decl);
	      inform (get_fndecl_argument_location (e_inner, i),
		      "existing default declared here" );
	      return false;
	    }
	}
    }

  return true;
}
| 12843 | |
| 12844 | /* FN is an implicit member function that we've discovered is new to |
| 12845 | the class. Add it to the TYPE_FIELDS chain and the method vector. |
| 12846 | Reset the appropriate classtype lazy flag. */ |
| 12847 | |
| 12848 | bool |
| 12849 | trees_in::install_implicit_member (tree fn) |
| 12850 | { |
| 12851 | tree ctx = DECL_CONTEXT (fn); |
| 12852 | tree name = DECL_NAME (fn); |
| 12853 | /* We know these are synthesized, so the set of expected prototypes |
| 12854 | is quite restricted. We're not validating correctness, just |
| 12855 | distinguishing beteeen the small set of possibilities. */ |
| 12856 | tree parm_type = TREE_VALUE (FUNCTION_FIRST_USER_PARMTYPE (fn)); |
| 12857 | if (IDENTIFIER_CTOR_P (name)) |
| 12858 | { |
| 12859 | if (CLASSTYPE_LAZY_DEFAULT_CTOR (ctx) |
| 12860 | && VOID_TYPE_P (parm_type)) |
| 12861 | CLASSTYPE_LAZY_DEFAULT_CTOR (ctx) = false; |
| 12862 | else if (!TYPE_REF_P (parm_type)) |
| 12863 | return false; |
| 12864 | else if (CLASSTYPE_LAZY_COPY_CTOR (ctx) |
| 12865 | && !TYPE_REF_IS_RVALUE (parm_type)) |
| 12866 | CLASSTYPE_LAZY_COPY_CTOR (ctx) = false; |
| 12867 | else if (CLASSTYPE_LAZY_MOVE_CTOR (ctx)) |
| 12868 | CLASSTYPE_LAZY_MOVE_CTOR (ctx) = false; |
| 12869 | else |
| 12870 | return false; |
| 12871 | } |
| 12872 | else if (IDENTIFIER_DTOR_P (name)) |
| 12873 | { |
| 12874 | if (CLASSTYPE_LAZY_DESTRUCTOR (ctx)) |
| 12875 | CLASSTYPE_LAZY_DESTRUCTOR (ctx) = false; |
| 12876 | else |
| 12877 | return false; |
| 12878 | if (DECL_VIRTUAL_P (fn)) |
| 12879 | /* A virtual dtor should have been created when the class |
| 12880 | became complete. */ |
| 12881 | return false; |
| 12882 | } |
| 12883 | else if (name == assign_op_identifier) |
| 12884 | { |
| 12885 | if (!TYPE_REF_P (parm_type)) |
| 12886 | return false; |
| 12887 | else if (CLASSTYPE_LAZY_COPY_ASSIGN (ctx) |
| 12888 | && !TYPE_REF_IS_RVALUE (parm_type)) |
| 12889 | CLASSTYPE_LAZY_COPY_ASSIGN (ctx) = false; |
| 12890 | else if (CLASSTYPE_LAZY_MOVE_ASSIGN (ctx)) |
| 12891 | CLASSTYPE_LAZY_MOVE_ASSIGN (ctx) = false; |
| 12892 | else |
| 12893 | return false; |
| 12894 | } |
| 12895 | else |
| 12896 | return false; |
| 12897 | |
| 12898 | dump (dumper::MERGE) && dump ("Adding implicit member %N" , fn); |
| 12899 | |
| 12900 | DECL_CHAIN (fn) = TYPE_FIELDS (ctx); |
| 12901 | TYPE_FIELDS (ctx) = fn; |
| 12902 | |
| 12903 | add_method (ctx, fn, false); |
| 12904 | |
| 12905 | /* Propagate TYPE_FIELDS. */ |
| 12906 | fixup_type_variants (ctx); |
| 12907 | |
| 12908 | return true; |
| 12909 | } |
| 12910 | |
| 12911 | /* Return non-zero if DECL has a definition that would be interesting to |
| 12912 | write out. */ |
| 12913 | |
static bool
has_definition (tree decl)
{
  /* For templates, the interesting definition hangs off the
     underlying templated decl.  */
  bool is_tmpl = TREE_CODE (decl) == TEMPLATE_DECL;
  if (is_tmpl)
    decl = DECL_TEMPLATE_RESULT (decl);

  switch (TREE_CODE (decl))
    {
    default:
      break;

    case FUNCTION_DECL:
      if (!DECL_SAVED_TREE (decl))
	/* Not defined.  */
	break;

      /* Inline functions must be emitted by every importer that
	 uses them.  */
      if (DECL_DECLARED_INLINE_P (decl))
	return true;

      if (header_module_p ())
	/* We always need to write definitions in header modules,
	   since there's no TU to emit them in otherwise.  */
	return true;

      if (DECL_TEMPLATE_INFO (decl))
	{
	  int use_tpl = DECL_USE_TEMPLATE (decl);

	  // FIXME: Partial specializations have definitions too.
	  /* use_tpl < 2 means this is not an explicit/full
	     specialization; its definition is needed to instantiate
	     from.  */
	  if (use_tpl < 2)
	    return true;
	}

      /* Coroutine transform functions always need to be emitted
	 into the importing TU if the ramp function will be.  */
      if (DECL_COROUTINE_P (decl))
	if (tree ramp = DECL_RAMP_FN (decl))
	  return has_definition (decl: ramp);
      break;

    case TYPE_DECL:
      {
	/* A type is "defined" when this is the name of the main
	   variant and the type has contents (enumerators for enums,
	   fields otherwise).  */
	tree type = TREE_TYPE (decl);
	if (type == TYPE_MAIN_VARIANT (type)
	    && decl == TYPE_NAME (type)
	    && (TREE_CODE (type) == ENUMERAL_TYPE
		? TYPE_VALUES (type) : TYPE_FIELDS (type)))
	  return true;
      }
      break;

    case VAR_DECL:
      /* DECL_INITIALIZED_P might not be set on a dependent VAR_DECL.  */
      if (DECL_LANG_SPECIFIC (decl)
	  && DECL_TEMPLATE_INFO (decl)
	  && DECL_INITIAL (decl))
	return true;
      else
	{
	  if (!DECL_INITIALIZED_P (decl))
	    /* Not defined.  */
	    return false;

	  if (header_module_p ())
	    /* We always need to write definitions in header modules,
	       since there's no TU to emit them in otherwise.  */
	    return true;

	  if (decl_maybe_constant_var_p (decl))
	    /* We might need its constant value.  */
	    return true;

	  if (vague_linkage_p (decl))
	    /* These are emitted as needed.  */
	    return true;

	  return false;
	}
      break;

    case CONCEPT_DECL:
      /* A concept is defined by its constraint expression.  */
      if (DECL_INITIAL (decl))
	return true;

      break;
    }

  return false;
}
| 13004 | |
| 13005 | uintptr_t * |
| 13006 | trees_in::find_duplicate (tree existing) |
| 13007 | { |
| 13008 | if (!duplicates) |
| 13009 | return NULL; |
| 13010 | |
| 13011 | return duplicates->get (k: existing); |
| 13012 | } |
| 13013 | |
| 13014 | /* We're starting to read a duplicate DECL. EXISTING is the already |
| 13015 | known node. */ |
| 13016 | |
| 13017 | void |
| 13018 | trees_in::register_duplicate (tree decl, tree existing) |
| 13019 | { |
| 13020 | if (!duplicates) |
| 13021 | duplicates = new duplicate_hash_map (40); |
| 13022 | |
| 13023 | bool existed; |
| 13024 | uintptr_t &slot = duplicates->get_or_insert (k: existing, existed: &existed); |
| 13025 | gcc_checking_assert (!existed); |
| 13026 | slot = reinterpret_cast<uintptr_t> (decl); |
| 13027 | |
| 13028 | if (TREE_CODE (decl) == TEMPLATE_DECL) |
| 13029 | /* Also register the DECL_TEMPLATE_RESULT as a duplicate so |
| 13030 | that passing decl's _RESULT to maybe_duplicate naturally |
| 13031 | gives us existing's _RESULT back. */ |
| 13032 | register_duplicate (DECL_TEMPLATE_RESULT (decl), |
| 13033 | DECL_TEMPLATE_RESULT (existing)); |
| 13034 | } |
| 13035 | |
| 13036 | /* We've read a definition of MAYBE_EXISTING. If not a duplicate, |
| 13037 | return MAYBE_EXISTING (into which the definition should be |
| 13038 | installed). Otherwise return NULL if already known bad, or the |
| 13039 | duplicate we read (for ODR checking, or extracting additional merge |
| 13040 | information). */ |
| 13041 | |
tree
trees_in::odr_duplicate (tree maybe_existing, bool has_defn)
{
  tree res = NULL_TREE;

  if (uintptr_t *dup = find_duplicate (existing: maybe_existing))
    {
      /* The low bit of the slot tags a duplicate already known to be
	 bad; leave RES as NULL_TREE for those.  */
      if (!(*dup & 1))
	res = reinterpret_cast<tree> (*dup);
    }
  else
    /* Not a duplicate: the definition is installed into
       MAYBE_EXISTING itself.  */
    res = maybe_existing;

  /* Sanity check the definition-tracking state.  */
  assert_definition (decl: maybe_existing, installing: res && !has_defn);

  // FIXME: We probably need to return the template, so that the
  // template header can be checked?
  return res ? STRIP_TEMPLATE (res) : NULL_TREE;
}
| 13061 | |
| 13062 | /* The following writer functions rely on the current behaviour of |
| 13063 | depset::hash::add_dependency making the decl and defn depset nodes |
| 13064 | depend on eachother. That way we don't have to worry about seeding |
| 13065 | the tree map with named decls that cannot be looked up by name (I.e |
| 13066 | template and function parms). We know the decl and definition will |
| 13067 | be in the same cluster, which is what we want. */ |
| 13068 | |
/* Write the definition of function DECL: result, body, constexpr
   information, struct-function flags and locations, and coroutine
   helpers.  read_function_def must consume in exactly this order.  */

void
trees_out::write_function_def (tree decl)
{
  tree_node (DECL_RESULT (decl));

  {
    /* The function body for a non-inline function or function template
       is ignored for determining exposures.  This should only matter
       for templates (we don't emit the bodies of non-inline functions
       to begin with).  */
    auto ovr = dep_hash->ignore_exposure_if (cond: !DECL_DECLARED_INLINE_P (decl));
    tree_node (DECL_INITIAL (decl));
    tree_node (DECL_SAVED_TREE (decl));
  }

  tree_node (DECL_FRIEND_CONTEXT (decl));

  /* Stream the registered constexpr body, if any; a flag byte tells
     the reader whether one follows.  */
  constexpr_fundef *cexpr = retrieve_constexpr_fundef (decl);

  if (streaming_p ())
    u (v: cexpr != nullptr);
  if (cexpr)
    {
      chained_decls (decls: cexpr->parms);
      tree_node (t: cexpr->result);
      tree_node (t: cexpr->body);
    }

  function* f = DECL_STRUCT_FUNCTION (decl);

  if (streaming_p ())
    {
      unsigned flags = 0;

      /* Whether the importer should emit this definition, if used.  */
      flags |= 1 * (DECL_NOT_REALLY_EXTERN (decl)
		    && (get_importer_interface (decl)
			!= importer_interface::external));

      /* Make sure DECL_REALLY_EXTERN and DECL_INTERFACE_KNOWN are consistent
	 on non-templates or we'll crash later in import_export_decl.  */
      gcc_checking_assert (flags || DECL_INTERFACE_KNOWN (decl)
			   || (DECL_LANG_SPECIFIC (decl)
			       && DECL_LOCAL_DECL_P (decl)
			       && DECL_OMP_DECLARE_REDUCTION_P (decl))
			   || (DECL_LANG_SPECIFIC (decl)
			       && DECL_TEMPLATE_INFO (decl)
			       && uses_template_parms (DECL_TI_ARGS (decl))));

      if (f)
	{
	  /* Bit 2 records that a struct function exists, so the
	     reader knows locations follow.  */
	  flags |= 2;
	  /* These flags are needed in tsubst_lambda_expr.  */
	  flags |= 4 * f->language->returns_value;
	  flags |= 8 * f->language->returns_null;
	  flags |= 16 * f->language->returns_abnormally;
	  flags |= 32 * f->language->infinite_loop;
	}

      u (v: flags);
    }

  if (state && f)
    {
      state->write_location (*this, f->function_start_locus);
      state->write_location (*this, f->function_end_locus);
    }

  if (DECL_COROUTINE_P (decl))
    {
      /* Stream the ramp function; if there is none, stream the actor
	 and destroy transforms instead.  */
      tree ramp = DECL_RAMP_FN (decl);
      tree_node (t: ramp);
      if (!ramp)
	{
	  tree_node (DECL_ACTOR_FN (decl));
	  tree_node (DECL_DESTROY_FN (decl));
	}
    }
}
| 13148 | |
/* Nothing additional to mark for a function definition.  */

void
trees_out::mark_function_def (tree)
{
}
| 13153 | |
| 13154 | bool |
| 13155 | trees_in::read_function_def (tree decl, tree maybe_template) |
| 13156 | { |
| 13157 | dump () && dump ("Reading function definition %N" , decl); |
| 13158 | tree result = tree_node (); |
| 13159 | tree initial = tree_node (); |
| 13160 | tree saved = tree_node (); |
| 13161 | tree context = tree_node (); |
| 13162 | post_process_data pdata {}; |
| 13163 | pdata.decl = maybe_template; |
| 13164 | |
| 13165 | tree maybe_dup = odr_duplicate (maybe_existing: maybe_template, DECL_SAVED_TREE (decl)); |
| 13166 | bool installing = maybe_dup && !DECL_SAVED_TREE (decl); |
| 13167 | |
| 13168 | constexpr_fundef cexpr; |
| 13169 | if (u ()) |
| 13170 | { |
| 13171 | cexpr.parms = chained_decls (); |
| 13172 | cexpr.result = tree_node (); |
| 13173 | cexpr.body = tree_node (); |
| 13174 | cexpr.decl = decl; |
| 13175 | } |
| 13176 | else |
| 13177 | cexpr.decl = NULL_TREE; |
| 13178 | |
| 13179 | unsigned flags = u (); |
| 13180 | if (flags & 2) |
| 13181 | { |
| 13182 | pdata.start_locus = state->read_location (*this); |
| 13183 | pdata.end_locus = state->read_location (*this); |
| 13184 | pdata.returns_value = flags & 4; |
| 13185 | pdata.returns_null = flags & 8; |
| 13186 | pdata.returns_abnormally = flags & 16; |
| 13187 | pdata.infinite_loop = flags & 32; |
| 13188 | } |
| 13189 | |
| 13190 | tree coro_actor = NULL_TREE; |
| 13191 | tree coro_destroy = NULL_TREE; |
| 13192 | tree coro_ramp = NULL_TREE; |
| 13193 | if (DECL_COROUTINE_P (decl)) |
| 13194 | { |
| 13195 | coro_ramp = tree_node (); |
| 13196 | if (!coro_ramp) |
| 13197 | { |
| 13198 | coro_actor = tree_node (); |
| 13199 | coro_destroy = tree_node (); |
| 13200 | if ((coro_actor == NULL_TREE) != (coro_destroy == NULL_TREE)) |
| 13201 | set_overrun (); |
| 13202 | } |
| 13203 | } |
| 13204 | |
| 13205 | if (get_overrun ()) |
| 13206 | return NULL_TREE; |
| 13207 | |
| 13208 | if (installing) |
| 13209 | { |
| 13210 | DECL_NOT_REALLY_EXTERN (decl) = flags & 1; |
| 13211 | DECL_RESULT (decl) = result; |
| 13212 | DECL_INITIAL (decl) = initial; |
| 13213 | DECL_SAVED_TREE (decl) = saved; |
| 13214 | |
| 13215 | if (context) |
| 13216 | SET_DECL_FRIEND_CONTEXT (decl, context); |
| 13217 | if (cexpr.decl) |
| 13218 | register_constexpr_fundef (cexpr); |
| 13219 | |
| 13220 | if (coro_ramp) |
| 13221 | coro_set_ramp_function (decl, coro_ramp); |
| 13222 | else if (coro_actor && coro_destroy) |
| 13223 | coro_set_transform_functions (decl, coro_actor, coro_destroy); |
| 13224 | |
| 13225 | if (DECL_LOCAL_DECL_P (decl)) |
| 13226 | /* Block-scope OMP UDRs aren't real functions, and don't need a |
| 13227 | function structure to be allocated or to be expanded. */ |
| 13228 | gcc_checking_assert (DECL_OMP_DECLARE_REDUCTION_P (decl)); |
| 13229 | else |
| 13230 | post_process (data: pdata); |
| 13231 | } |
| 13232 | else if (maybe_dup) |
| 13233 | { |
| 13234 | // FIXME:QOI Check matching defn |
| 13235 | } |
| 13236 | |
| 13237 | return true; |
| 13238 | } |
| 13239 | |
| 13240 | /* Also for CONCEPT_DECLs. */ |
| 13241 | |
/* Write the definition of variable (or concept) DECL: its static
   initializer, or for header modules the dynamic initializer.  */

void
trees_out::write_var_def (tree decl)
{
  /* The initializer of a non-inline variable or variable template is
     ignored for determining exposures.  */
  auto ovr = dep_hash->ignore_exposure_if (VAR_P (decl)
					   && !DECL_INLINE_VAR_P (decl));

  tree init = DECL_INITIAL (decl);
  tree_node (t: init);
  if (!init)
    {
      tree dyn_init = NULL_TREE;

      /* We only need to write initializers in header modules.  */
      if (header_module_p () && DECL_NONTRIVIALLY_INITIALIZED_P (decl))
	{
	  /* Find DECL on the relevant dynamic-initialization list;
	     its TREE_PURPOSE is the initializer to stream.  */
	  dyn_init = value_member (decl,
				   CP_DECL_THREAD_LOCAL_P (decl)
				   ? tls_aggregates : static_aggregates);
	  gcc_checking_assert (dyn_init);
	  /* Mark it so write_inits knows this is needed.  */
	  TREE_LANG_FLAG_0 (dyn_init) = true;
	  dyn_init = TREE_PURPOSE (dyn_init);
	}
      tree_node (t: dyn_init);
    }
}
| 13270 | |
/* Nothing additional to mark for a variable definition.  */

void
trees_out::mark_var_def (tree)
{
}
| 13275 | |
/* Read a variable (or concept) definition for DECL, mirroring
   write_var_def.  MAYBE_TEMPLATE is the enclosing TEMPLATE_DECL, or
   DECL itself.  Returns true on success.  */

bool
trees_in::read_var_def (tree decl, tree maybe_template)
{
  /* Do not mark the virtual table entries as used.  */
  bool vtable = VAR_P (decl) && DECL_VTABLE_OR_VTT_P (decl);
  unused += vtable;
  tree init = tree_node ();
  /* A dynamic initializer was only streamed when there was no static
     one (see write_var_def).  */
  tree dyn_init = init ? NULL_TREE : tree_node ();
  unused -= vtable;

  if (get_overrun ())
    return false;

  bool initialized = (VAR_P (decl) ? bool (DECL_INITIALIZED_P (decl))
		      : bool (DECL_INITIAL (decl)));
  tree maybe_dup = odr_duplicate (maybe_existing: maybe_template, has_defn: initialized);
  /* Install only if this isn't a known-bad duplicate and we don't
     already have a definition.  */
  bool installing = maybe_dup && !initialized;
  if (installing)
    {
      DECL_INITIAL (decl) = init;
      if (DECL_EXTERNAL (decl))
	DECL_NOT_REALLY_EXTERN (decl) = true;
      if (VAR_P (decl))
	{
	  DECL_INITIALIZED_P (decl) = true;
	  if (maybe_dup && DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (maybe_dup))
	    DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true;
	  tentative_decl_linkage (decl);
	  if (DECL_EXPLICIT_INSTANTIATION (decl)
	      && !DECL_EXTERNAL (decl))
	    setup_explicit_instantiation_definition_linkage (decl);
	  /* Class non-template static members are handled in read_class_def.
	     But still handle specialisations of member templates.  */
	  if ((!DECL_CLASS_SCOPE_P (decl)
	       || primary_template_specialization_p (decl))
	      && (DECL_IMPLICIT_INSTANTIATION (decl)
		  || (DECL_EXPLICIT_INSTANTIATION (decl)
		      && !DECL_EXTERNAL (decl))))
	    note_vague_linkage_variable (decl);
	}
      /* Queue any dynamic initializer on the appropriate list.  */
      if (!dyn_init)
	;
      else if (CP_DECL_THREAD_LOCAL_P (decl))
	tls_aggregates = tree_cons (dyn_init, decl, tls_aggregates);
      else
	static_aggregates = tree_cons (dyn_init, decl, static_aggregates);
    }
  else if (maybe_dup)
    {
      // FIXME:QOI Check matching defn
    }

  return true;
}
| 13330 | |
| 13331 | /* If MEMBER doesn't have an independent life outside the class, |
| 13332 | return it (or its TEMPLATE_DECL). Otherwise NULL. */ |
| 13333 | |
| 13334 | static tree |
| 13335 | member_owned_by_class (tree member) |
| 13336 | { |
| 13337 | gcc_assert (DECL_P (member)); |
| 13338 | |
| 13339 | /* Clones are owned by their origin. */ |
| 13340 | if (DECL_CLONED_FUNCTION_P (member)) |
| 13341 | return NULL; |
| 13342 | |
| 13343 | if (TREE_CODE (member) == FIELD_DECL) |
| 13344 | /* FIELD_DECLS can have template info in some cases. We always |
| 13345 | want the FIELD_DECL though, as there's never a TEMPLATE_DECL |
| 13346 | wrapping them. */ |
| 13347 | return member; |
| 13348 | |
| 13349 | int use_tpl = -1; |
| 13350 | if (tree ti = node_template_info (decl: member, use&: use_tpl)) |
| 13351 | { |
| 13352 | // FIXME: Don't bail on things that CANNOT have their own |
| 13353 | // template header. No, make sure they're in the same cluster. |
| 13354 | if (use_tpl > 0) |
| 13355 | return NULL_TREE; |
| 13356 | |
| 13357 | if (DECL_TEMPLATE_RESULT (TI_TEMPLATE (ti)) == member) |
| 13358 | member = TI_TEMPLATE (ti); |
| 13359 | } |
| 13360 | return member; |
| 13361 | } |
| 13362 | |
/* Write the definition of class DEFN's type: layout, fields, member
   vector, vtables, friends and thunks.  read_class_def must consume
   in exactly this order.  */

void
trees_out::write_class_def (tree defn)
{
  gcc_assert (DECL_P (defn));
  if (streaming_p ())
    dump () && dump ("Writing class definition %N" , defn);

  tree type = TREE_TYPE (defn);
  /* Layout and inheritance information.  */
  tree_node (TYPE_SIZE (type));
  tree_node (TYPE_SIZE_UNIT (type));
  tree_node (TYPE_VFIELD (type));
  tree_node (TYPE_BINFO (type));

  vec_chained_decls (TYPE_FIELDS (type));

  /* Every class but __as_base has a type-specific.  */
  gcc_checking_assert (!TYPE_LANG_SPECIFIC (type) == IS_FAKE_BASE_TYPE (type));

  if (TYPE_LANG_SPECIFIC (type))
    {
      {
	/* Stream the member vector, length first.  */
	vec<tree, va_gc> *v = CLASSTYPE_MEMBER_VEC (type);
	if (!v)
	  {
	    gcc_checking_assert (!streaming_p ());
	    /* Force a class vector.  */
	    v = set_class_bindings (type, extra: -1);
	    gcc_checking_assert (v);
	  }

	unsigned len = v->length ();
	if (streaming_p ())
	  u (v: len);
	for (unsigned ix = 0; ix != len; ix++)
	  {
	    tree m = (*v)[ix];
	    if (TREE_CODE (m) == TYPE_DECL
		&& DECL_ARTIFICIAL (m)
		&& TYPE_STUB_DECL (TREE_TYPE (m)) == m)
	      /* This is a using-decl for a type, or an anonymous
		 struct (maybe with a typedef name).  Write the type.  */
	      m = TREE_TYPE (m);
	    tree_node (t: m);
	  }
      }
      tree_node (CLASSTYPE_LAMBDA_EXPR (type));

      /* TYPE_CONTAINS_VPTR_P looks at the vbase vector, which the
	 reader won't know at this point.  */
      int has_vptr = TYPE_CONTAINS_VPTR_P (type);

      if (streaming_p ())
	{
	  unsigned nvbases = vec_safe_length (CLASSTYPE_VBASECLASSES (type));
	  u (v: nvbases);
	  i (v: has_vptr);
	}

      if (has_vptr)
	{
	  /* Virtual-function bookkeeping.  */
	  tree_vec (CLASSTYPE_PURE_VIRTUALS (type));
	  tree_pair_vec (CLASSTYPE_VCALL_INDICES (type));
	  tree_node (CLASSTYPE_KEY_METHOD (type));
	}
    }

  if (TYPE_LANG_SPECIFIC (type))
    {
      tree_node (CLASSTYPE_PRIMARY_BINFO (type));

      /* Stream the as-base type via its TYPE_NAME decl.  */
      tree as_base = CLASSTYPE_AS_BASE (type);
      if (as_base)
	as_base = TYPE_NAME (as_base);
      tree_node (t: as_base);

      /* Write the vtables.  */
      tree vtables = CLASSTYPE_VTABLES (type);
      vec_chained_decls (decls: vtables);
      for (; vtables; vtables = TREE_CHAIN (vtables))
	write_definition (decl: vtables);

      {
	/* Friend declarations in class definitions are ignored when
	   determining exposures.  */
	auto ovr = dep_hash->ignore_exposure_if (cond: true);

	/* Write the friend classes.  */
	tree_list (CLASSTYPE_FRIEND_CLASSES (type), has_purpose: false);

	/* Write the friend functions.  */
	for (tree friends = DECL_FRIENDLIST (defn);
	     friends; friends = TREE_CHAIN (friends))
	  {
	    tree_node (FRIEND_NAME (friends));
	    tree_list (FRIEND_DECLS (friends), has_purpose: false);
	  }
	/* End of friend fns.  */
	tree_node (NULL_TREE);
      }

      /* Write the decl list.  We don't need to ignore exposures of friend
	 decls here as any such decls should already have been added and
	 ignored above.  */
      tree_list (CLASSTYPE_DECL_LIST (type), has_purpose: true);

      if (TYPE_CONTAINS_VPTR_P (type))
	{
	  /* Write the thunks, NULL-terminated.  */
	  for (tree decls = TYPE_FIELDS (type);
	       decls; decls = DECL_CHAIN (decls))
	    if (TREE_CODE (decls) == FUNCTION_DECL
		&& DECL_VIRTUAL_P (decls)
		&& DECL_THUNKS (decls))
	      {
		tree_node (t: decls);
		/* Thunks are always unique, so chaining is ok.  */
		chained_decls (DECL_THUNKS (decls));
	      }
	  tree_node (NULL_TREE);
	}
    }
}
| 13485 | |
| 13486 | void |
| 13487 | trees_out::mark_class_member (tree member, bool do_defn) |
| 13488 | { |
| 13489 | gcc_assert (DECL_P (member)); |
| 13490 | |
| 13491 | member = member_owned_by_class (member); |
| 13492 | if (member) |
| 13493 | mark_declaration (decl: member, do_defn: do_defn && has_definition (decl: member)); |
| 13494 | } |
| 13495 | |
/* Mark everything a class definition owns, so write_class_def can
   stream it by value with the class: suitable members, the binfo
   hierarchy, vtables and thunks.  */

void
trees_out::mark_class_def (tree defn)
{
  gcc_assert (DECL_P (defn));
  tree type = TREE_TYPE (defn);
  /* Mark the class members that are not type-decls and cannot have
     independent definitions.  */
  for (tree member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
    if (TREE_CODE (member) == FIELD_DECL
	|| TREE_CODE (member) == USING_DECL
	/* A cloned enum-decl from 'using enum unrelated;' */
	|| (TREE_CODE (member) == CONST_DECL
	    && DECL_CONTEXT (member) == type))
      {
	mark_class_member (member);
	if (TREE_CODE (member) == FIELD_DECL)
	  if (tree repr = DECL_BIT_FIELD_REPRESENTATIVE (member))
	    /* If we're marking a class template definition, then
	       this'll contain the width (as set by grokbitfield)
	       instead of a decl.  */
	    if (DECL_P (repr))
	      mark_declaration (decl: repr, do_defn: false);
      }

  /* Mark the binfo hierarchy.  */
  for (tree child = TYPE_BINFO (type); child; child = TREE_CHAIN (child))
    mark_by_value (decl: child);

  if (TYPE_LANG_SPECIFIC (type))
    {
      /* Vtables are marked along with their definitions.  */
      for (tree vtable = CLASSTYPE_VTABLES (type);
	   vtable; vtable = TREE_CHAIN (vtable))
	mark_declaration (decl: vtable, do_defn: true);

      if (TYPE_CONTAINS_VPTR_P (type))
	/* Mark the thunks, they belong to the class definition,
	   /not/ the thunked-to function.  */
	for (tree decls = TYPE_FIELDS (type);
	     decls; decls = DECL_CHAIN (decls))
	  if (TREE_CODE (decls) == FUNCTION_DECL)
	    for (tree thunks = DECL_THUNKS (decls);
		 thunks; thunks = DECL_CHAIN (thunks))
	      mark_declaration (decl: thunks, do_defn: false);
    }
}
| 13541 | |
| 13542 | /* Nop sorting, needed for resorting the member vec. */ |
| 13543 | |
static void
nop (void *, void *, void *)
{
  /* Deliberately empty.  */
}
| 13548 | |
| 13549 | bool |
| 13550 | trees_in::read_class_def (tree defn, tree maybe_template) |
| 13551 | { |
| 13552 | gcc_assert (DECL_P (defn)); |
| 13553 | dump () && dump ("Reading class definition %N" , defn); |
| 13554 | tree type = TREE_TYPE (defn); |
| 13555 | tree size = tree_node (); |
| 13556 | tree size_unit = tree_node (); |
| 13557 | tree vfield = tree_node (); |
| 13558 | tree binfo = tree_node (); |
| 13559 | vec<tree, va_gc> *vbase_vec = NULL; |
| 13560 | vec<tree, va_gc> *member_vec = NULL; |
| 13561 | vec<tree, va_gc> *pure_virts = NULL; |
| 13562 | vec<tree_pair_s, va_gc> *vcall_indices = NULL; |
| 13563 | tree key_method = NULL_TREE; |
| 13564 | tree lambda = NULL_TREE; |
| 13565 | |
| 13566 | /* Read the fields. */ |
| 13567 | vec<tree, va_heap> *fields = vec_chained_decls (); |
| 13568 | |
| 13569 | if (TYPE_LANG_SPECIFIC (type)) |
| 13570 | { |
| 13571 | if (unsigned len = u ()) |
| 13572 | { |
| 13573 | vec_alloc (v&: member_vec, nelems: len); |
| 13574 | for (unsigned ix = 0; ix != len; ix++) |
| 13575 | { |
| 13576 | tree m = tree_node (); |
| 13577 | if (get_overrun ()) |
| 13578 | break; |
| 13579 | if (TYPE_P (m)) |
| 13580 | m = TYPE_STUB_DECL (m); |
| 13581 | member_vec->quick_push (obj: m); |
| 13582 | } |
| 13583 | } |
| 13584 | lambda = tree_node (); |
| 13585 | |
| 13586 | if (!get_overrun ()) |
| 13587 | { |
| 13588 | unsigned nvbases = u (); |
| 13589 | if (nvbases) |
| 13590 | { |
| 13591 | vec_alloc (v&: vbase_vec, nelems: nvbases); |
| 13592 | for (tree child = binfo; child; child = TREE_CHAIN (child)) |
| 13593 | if (BINFO_VIRTUAL_P (child)) |
| 13594 | vbase_vec->quick_push (obj: child); |
| 13595 | } |
| 13596 | } |
| 13597 | |
| 13598 | if (!get_overrun ()) |
| 13599 | { |
| 13600 | int has_vptr = i (); |
| 13601 | if (has_vptr) |
| 13602 | { |
| 13603 | pure_virts = tree_vec (); |
| 13604 | vcall_indices = tree_pair_vec (); |
| 13605 | key_method = tree_node (); |
| 13606 | } |
| 13607 | } |
| 13608 | } |
| 13609 | |
| 13610 | tree maybe_dup = odr_duplicate (maybe_existing: maybe_template, TYPE_SIZE (type)); |
| 13611 | bool installing = maybe_dup && !TYPE_SIZE (type); |
| 13612 | if (installing) |
| 13613 | { |
| 13614 | if (maybe_dup != defn) |
| 13615 | { |
| 13616 | // FIXME: This is needed on other defns too, almost |
| 13617 | // duplicate-decl like? See is_matching_decl too. |
| 13618 | /* Copy flags from the duplicate. */ |
| 13619 | tree type_dup = TREE_TYPE (maybe_dup); |
| 13620 | |
| 13621 | /* Core pieces. */ |
| 13622 | TYPE_MODE_RAW (type) = TYPE_MODE_RAW (type_dup); |
| 13623 | TYPE_ALIGN_RAW (type) = TYPE_ALIGN_RAW (type_dup); |
| 13624 | TYPE_WARN_IF_NOT_ALIGN_RAW (type) |
| 13625 | = TYPE_WARN_IF_NOT_ALIGN_RAW (type_dup); |
| 13626 | TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (type_dup); |
| 13627 | |
| 13628 | SET_DECL_MODE (defn, DECL_MODE (maybe_dup)); |
| 13629 | DECL_SIZE (defn) = DECL_SIZE (maybe_dup); |
| 13630 | DECL_SIZE_UNIT (defn) = DECL_SIZE_UNIT (maybe_dup); |
| 13631 | DECL_ALIGN_RAW (defn) = DECL_ALIGN_RAW (maybe_dup); |
| 13632 | DECL_WARN_IF_NOT_ALIGN_RAW (defn) |
| 13633 | = DECL_WARN_IF_NOT_ALIGN_RAW (maybe_dup); |
| 13634 | DECL_USER_ALIGN (defn) = DECL_USER_ALIGN (maybe_dup); |
| 13635 | |
| 13636 | TYPE_TYPELESS_STORAGE (type) = TYPE_TYPELESS_STORAGE (type_dup); |
| 13637 | TYPE_CXX_ODR_P (type) = TYPE_CXX_ODR_P (type_dup); |
| 13638 | TYPE_NO_FORCE_BLK (type) = TYPE_NO_FORCE_BLK (type_dup); |
| 13639 | TYPE_TRANSPARENT_AGGR (type) = TYPE_TRANSPARENT_AGGR (type_dup); |
| 13640 | TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) |
| 13641 | = TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type_dup); |
| 13642 | |
| 13643 | TYPE_EMPTY_P (type) = TYPE_EMPTY_P (type_dup); |
| 13644 | TREE_ADDRESSABLE (type) = TREE_ADDRESSABLE (type_dup); |
| 13645 | |
| 13646 | /* C++ pieces. */ |
| 13647 | TYPE_POLYMORPHIC_P (type) = TYPE_POLYMORPHIC_P (type_dup); |
| 13648 | CLASSTYPE_FINAL (type) = CLASSTYPE_FINAL (type_dup); |
| 13649 | |
| 13650 | TYPE_HAS_USER_CONSTRUCTOR (type) |
| 13651 | = TYPE_HAS_USER_CONSTRUCTOR (type_dup); |
| 13652 | TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type) |
| 13653 | = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type_dup); |
| 13654 | TYPE_NEEDS_CONSTRUCTING (type) |
| 13655 | = TYPE_NEEDS_CONSTRUCTING (type_dup); |
| 13656 | |
| 13657 | if (auto ls = TYPE_LANG_SPECIFIC (type_dup)) |
| 13658 | { |
| 13659 | if (TYPE_LANG_SPECIFIC (type)) |
| 13660 | { |
| 13661 | CLASSTYPE_BEFRIENDING_CLASSES (type_dup) |
| 13662 | = CLASSTYPE_BEFRIENDING_CLASSES (type); |
| 13663 | if (!ANON_AGGR_TYPE_P (type)) |
| 13664 | CLASSTYPE_TYPEINFO_VAR (type_dup) |
| 13665 | = CLASSTYPE_TYPEINFO_VAR (type); |
| 13666 | } |
| 13667 | for (tree v = type; v; v = TYPE_NEXT_VARIANT (v)) |
| 13668 | TYPE_LANG_SPECIFIC (v) = ls; |
| 13669 | } |
| 13670 | } |
| 13671 | |
| 13672 | TYPE_SIZE (type) = size; |
| 13673 | TYPE_SIZE_UNIT (type) = size_unit; |
| 13674 | |
| 13675 | if (fields) |
| 13676 | { |
| 13677 | tree *chain = &TYPE_FIELDS (type); |
| 13678 | unsigned len = fields->length (); |
| 13679 | for (unsigned ix = 0; ix != len; ix++) |
| 13680 | { |
| 13681 | tree decl = (*fields)[ix]; |
| 13682 | |
| 13683 | if (!decl) |
| 13684 | { |
| 13685 | /* An anonymous struct with typedef name. */ |
| 13686 | tree tdef = (*fields)[ix+1]; |
| 13687 | decl = TYPE_STUB_DECL (TREE_TYPE (tdef)); |
| 13688 | gcc_checking_assert (IDENTIFIER_ANON_P (DECL_NAME (decl)) |
| 13689 | && decl != tdef); |
| 13690 | } |
| 13691 | |
| 13692 | gcc_checking_assert (!*chain == !DECL_CLONED_FUNCTION_P (decl)); |
| 13693 | *chain = decl; |
| 13694 | chain = &DECL_CHAIN (decl); |
| 13695 | |
| 13696 | if (TREE_CODE (decl) == FIELD_DECL |
| 13697 | && ANON_AGGR_TYPE_P (TREE_TYPE (decl))) |
| 13698 | { |
| 13699 | tree anon_type = TYPE_MAIN_VARIANT (TREE_TYPE (decl)); |
| 13700 | if (DECL_NAME (defn) == as_base_identifier) |
| 13701 | /* ANON_AGGR_TYPE_FIELD should already point to the |
| 13702 | original FIELD_DECL; don't overwrite it to point |
| 13703 | to the as-base FIELD_DECL copy. */ |
| 13704 | gcc_checking_assert (ANON_AGGR_TYPE_FIELD (anon_type)); |
| 13705 | else |
| 13706 | ANON_AGGR_TYPE_FIELD (anon_type) = decl; |
| 13707 | } |
| 13708 | |
| 13709 | if (TREE_CODE (decl) == USING_DECL |
| 13710 | && TREE_CODE (USING_DECL_SCOPE (decl)) == RECORD_TYPE) |
| 13711 | { |
| 13712 | /* Reconstruct DECL_ACCESS. */ |
| 13713 | tree decls = USING_DECL_DECLS (decl); |
| 13714 | tree access = declared_access (decl); |
| 13715 | |
| 13716 | for (ovl_iterator iter (decls); iter; ++iter) |
| 13717 | { |
| 13718 | tree d = *iter; |
| 13719 | |
| 13720 | retrofit_lang_decl (d); |
| 13721 | tree list = DECL_ACCESS (d); |
| 13722 | |
| 13723 | if (!purpose_member (type, list)) |
| 13724 | DECL_ACCESS (d) = tree_cons (type, access, list); |
| 13725 | } |
| 13726 | } |
| 13727 | |
| 13728 | if (TREE_CODE (decl) == VAR_DECL |
| 13729 | && TREE_CODE (maybe_template) != TEMPLATE_DECL) |
| 13730 | note_vague_linkage_variable (decl); |
| 13731 | } |
| 13732 | } |
| 13733 | |
| 13734 | TYPE_VFIELD (type) = vfield; |
| 13735 | TYPE_BINFO (type) = binfo; |
| 13736 | |
| 13737 | if (TYPE_LANG_SPECIFIC (type)) |
| 13738 | { |
| 13739 | if (!TYPE_POLYMORPHIC_P (type)) |
| 13740 | SET_CLASSTYPE_LAMBDA_EXPR (type, lambda); |
| 13741 | else |
| 13742 | gcc_checking_assert (lambda == NULL_TREE); |
| 13743 | |
| 13744 | CLASSTYPE_MEMBER_VEC (type) = member_vec; |
| 13745 | CLASSTYPE_PURE_VIRTUALS (type) = pure_virts; |
| 13746 | CLASSTYPE_VCALL_INDICES (type) = vcall_indices; |
| 13747 | |
| 13748 | if (TYPE_POLYMORPHIC_P (type)) |
| 13749 | SET_CLASSTYPE_KEY_METHOD (type, key_method); |
| 13750 | else |
| 13751 | gcc_checking_assert (key_method == NULL_TREE); |
| 13752 | |
| 13753 | CLASSTYPE_VBASECLASSES (type) = vbase_vec; |
| 13754 | |
| 13755 | /* Resort the member vector. */ |
| 13756 | resort_type_member_vec (member_vec, NULL, nop, NULL); |
| 13757 | } |
| 13758 | } |
| 13759 | else if (maybe_dup) |
| 13760 | { |
| 13761 | // FIXME:QOI Check matching defn |
| 13762 | } |
| 13763 | |
| 13764 | if (TYPE_LANG_SPECIFIC (type)) |
| 13765 | { |
| 13766 | tree primary = tree_node (); |
| 13767 | tree as_base = tree_node (); |
| 13768 | |
| 13769 | if (as_base) |
| 13770 | as_base = TREE_TYPE (as_base); |
| 13771 | |
| 13772 | /* Read the vtables. */ |
| 13773 | vec<tree, va_heap> *vtables = vec_chained_decls (); |
| 13774 | if (vtables) |
| 13775 | { |
| 13776 | unsigned len = vtables->length (); |
| 13777 | for (unsigned ix = 0; ix != len; ix++) |
| 13778 | { |
| 13779 | tree vtable = (*vtables)[ix]; |
| 13780 | read_var_def (decl: vtable, maybe_template: vtable); |
| 13781 | } |
| 13782 | } |
| 13783 | |
| 13784 | tree friend_classes = tree_list (has_purpose: false); |
| 13785 | tree friend_functions = NULL_TREE; |
| 13786 | for (tree *chain = &friend_functions; |
| 13787 | tree name = tree_node (); chain = &TREE_CHAIN (*chain)) |
| 13788 | { |
| 13789 | tree val = tree_list (has_purpose: false); |
| 13790 | *chain = build_tree_list (name, val); |
| 13791 | } |
| 13792 | tree decl_list = tree_list (has_purpose: true); |
| 13793 | |
| 13794 | if (installing) |
| 13795 | { |
| 13796 | CLASSTYPE_PRIMARY_BINFO (type) = primary; |
| 13797 | CLASSTYPE_AS_BASE (type) = as_base; |
| 13798 | |
| 13799 | if (vtables) |
| 13800 | { |
| 13801 | if ((!CLASSTYPE_KEY_METHOD (type) |
| 13802 | /* Sneaky user may have defined it inline |
| 13803 | out-of-class. */ |
| 13804 | || DECL_DECLARED_INLINE_P (CLASSTYPE_KEY_METHOD (type))) |
| 13805 | /* An imported non-template class attached to a module |
| 13806 | doesn't need to have its vtables emitted here. */ |
| 13807 | && (CLASSTYPE_USE_TEMPLATE (type) |
| 13808 | || !DECL_MODULE_ATTACH_P (defn))) |
| 13809 | vec_safe_push (v&: keyed_classes, obj: type); |
| 13810 | unsigned len = vtables->length (); |
| 13811 | tree *chain = &CLASSTYPE_VTABLES (type); |
| 13812 | for (unsigned ix = 0; ix != len; ix++) |
| 13813 | { |
| 13814 | tree vtable = (*vtables)[ix]; |
| 13815 | gcc_checking_assert (!*chain); |
| 13816 | *chain = vtable; |
| 13817 | chain = &DECL_CHAIN (vtable); |
| 13818 | } |
| 13819 | } |
| 13820 | CLASSTYPE_FRIEND_CLASSES (type) = friend_classes; |
| 13821 | DECL_FRIENDLIST (defn) = friend_functions; |
| 13822 | CLASSTYPE_DECL_LIST (type) = decl_list; |
| 13823 | |
| 13824 | for (; friend_classes; friend_classes = TREE_CHAIN (friend_classes)) |
| 13825 | { |
| 13826 | tree f = TREE_VALUE (friend_classes); |
| 13827 | if (TREE_CODE (f) == TEMPLATE_DECL) |
| 13828 | f = TREE_TYPE (f); |
| 13829 | |
| 13830 | if (CLASS_TYPE_P (f)) |
| 13831 | { |
| 13832 | CLASSTYPE_BEFRIENDING_CLASSES (f) |
| 13833 | = tree_cons (NULL_TREE, type, |
| 13834 | CLASSTYPE_BEFRIENDING_CLASSES (f)); |
| 13835 | dump () && dump ("Class %N befriending %C:%N" , |
| 13836 | type, TREE_CODE (f), f); |
| 13837 | } |
| 13838 | } |
| 13839 | |
| 13840 | for (; friend_functions; |
| 13841 | friend_functions = TREE_CHAIN (friend_functions)) |
| 13842 | for (tree friend_decls = TREE_VALUE (friend_functions); |
| 13843 | friend_decls; friend_decls = TREE_CHAIN (friend_decls)) |
| 13844 | { |
| 13845 | tree f = TREE_VALUE (friend_decls); |
| 13846 | if (TREE_CODE (f) == TU_LOCAL_ENTITY) |
| 13847 | continue; |
| 13848 | |
| 13849 | DECL_BEFRIENDING_CLASSES (f) |
| 13850 | = tree_cons (NULL_TREE, type, DECL_BEFRIENDING_CLASSES (f)); |
| 13851 | dump () && dump ("Class %N befriending %C:%N" , |
| 13852 | type, TREE_CODE (f), f); |
| 13853 | } |
| 13854 | } |
| 13855 | |
| 13856 | if (TYPE_CONTAINS_VPTR_P (type)) |
| 13857 | /* Read and install the thunks. */ |
| 13858 | while (tree vfunc = tree_node ()) |
| 13859 | { |
| 13860 | tree thunks = chained_decls (); |
| 13861 | if (installing) |
| 13862 | SET_DECL_THUNKS (vfunc, thunks); |
| 13863 | } |
| 13864 | |
| 13865 | vec_free (v&: vtables); |
| 13866 | } |
| 13867 | |
| 13868 | /* Propagate to all variants. */ |
| 13869 | if (installing) |
| 13870 | fixup_type_variants (type); |
| 13871 | |
| 13872 | /* IS_FAKE_BASE_TYPE is inaccurate at this point, because if this is |
| 13873 | the fake base, we've not hooked it into the containing class's |
| 13874 | data structure yet. Fortunately it has a unique name. */ |
| 13875 | if (installing |
| 13876 | && DECL_NAME (defn) != as_base_identifier |
| 13877 | && (!CLASSTYPE_TEMPLATE_INFO (type) |
| 13878 | || !uses_template_parms (TI_ARGS (CLASSTYPE_TEMPLATE_INFO (type))))) |
| 13879 | /* Emit debug info. It'd be nice to know if the interface TU |
| 13880 | already emitted this. */ |
| 13881 | rest_of_type_compilation (type, !LOCAL_CLASS_P (type)); |
| 13882 | |
| 13883 | vec_free (v&: fields); |
| 13884 | |
| 13885 | return !get_overrun (); |
| 13886 | } |
| 13887 | |
| 13888 | void |
| 13889 | trees_out::write_enum_def (tree decl) |
| 13890 | { |
| 13891 | tree type = TREE_TYPE (decl); |
| 13892 | |
| 13893 | tree_node (TYPE_VALUES (type)); |
| 13894 | /* Note that we stream TYPE_MIN/MAX_VALUE directly as part of the |
| 13895 | ENUMERAL_TYPE. */ |
| 13896 | } |
| 13897 | |
| 13898 | void |
| 13899 | trees_out::mark_enum_def (tree decl) |
| 13900 | { |
| 13901 | tree type = TREE_TYPE (decl); |
| 13902 | |
| 13903 | for (tree values = TYPE_VALUES (type); values; values = TREE_CHAIN (values)) |
| 13904 | { |
| 13905 | tree cst = TREE_VALUE (values); |
| 13906 | mark_by_value (decl: cst); |
| 13907 | /* We must mark the init to avoid circularity in tt_enum_int. */ |
| 13908 | if (tree init = DECL_INITIAL (cst)) |
| 13909 | if (TREE_CODE (init) == INTEGER_CST) |
| 13910 | mark_by_value (decl: init); |
| 13911 | } |
| 13912 | } |
| 13913 | |
| 13914 | bool |
| 13915 | trees_in::read_enum_def (tree defn, tree maybe_template) |
| 13916 | { |
| 13917 | tree type = TREE_TYPE (defn); |
| 13918 | tree values = tree_node (); |
| 13919 | |
| 13920 | if (get_overrun ()) |
| 13921 | return false; |
| 13922 | |
| 13923 | tree maybe_dup = odr_duplicate (maybe_existing: maybe_template, TYPE_VALUES (type)); |
| 13924 | bool installing = maybe_dup && !TYPE_VALUES (type); |
| 13925 | |
| 13926 | if (installing) |
| 13927 | { |
| 13928 | TYPE_VALUES (type) = values; |
| 13929 | /* Note that we stream TYPE_MIN/MAX_VALUE directly as part of the |
| 13930 | ENUMERAL_TYPE. */ |
| 13931 | |
| 13932 | rest_of_type_compilation (type, DECL_NAMESPACE_SCOPE_P (defn)); |
| 13933 | } |
| 13934 | else if (maybe_dup) |
| 13935 | { |
| 13936 | tree known = TYPE_VALUES (type); |
| 13937 | for (; known && values; |
| 13938 | known = TREE_CHAIN (known), values = TREE_CHAIN (values)) |
| 13939 | { |
| 13940 | tree known_decl = TREE_VALUE (known); |
| 13941 | tree new_decl = TREE_VALUE (values); |
| 13942 | |
| 13943 | if (DECL_NAME (known_decl) != DECL_NAME (new_decl)) |
| 13944 | break; |
| 13945 | |
| 13946 | new_decl = maybe_duplicate (decl: new_decl); |
| 13947 | |
| 13948 | if (!cp_tree_equal (DECL_INITIAL (known_decl), |
| 13949 | DECL_INITIAL (new_decl))) |
| 13950 | break; |
| 13951 | } |
| 13952 | |
| 13953 | if (known || values) |
| 13954 | { |
| 13955 | auto_diagnostic_group d; |
| 13956 | error_at (DECL_SOURCE_LOCATION (maybe_dup), |
| 13957 | "definition of %qD does not match" , maybe_dup); |
| 13958 | inform (DECL_SOURCE_LOCATION (defn), |
| 13959 | "existing definition %qD" , defn); |
| 13960 | |
| 13961 | tree known_decl = NULL_TREE, new_decl = NULL_TREE; |
| 13962 | |
| 13963 | if (known) |
| 13964 | known_decl = TREE_VALUE (known); |
| 13965 | if (values) |
| 13966 | new_decl = maybe_duplicate (TREE_VALUE (values)); |
| 13967 | |
| 13968 | if (known_decl && new_decl) |
| 13969 | { |
| 13970 | inform (DECL_SOURCE_LOCATION (new_decl), |
| 13971 | "enumerator %qD does not match ..." , new_decl); |
| 13972 | inform (DECL_SOURCE_LOCATION (known_decl), |
| 13973 | "... this enumerator %qD" , known_decl); |
| 13974 | } |
| 13975 | else if (known_decl || new_decl) |
| 13976 | { |
| 13977 | tree = known_decl ? known_decl : new_decl; |
| 13978 | inform (DECL_SOURCE_LOCATION (extra), |
| 13979 | "additional enumerators beginning with %qD" , extra); |
| 13980 | } |
| 13981 | else |
| 13982 | inform (DECL_SOURCE_LOCATION (maybe_dup), |
| 13983 | "enumeration range differs" ); |
| 13984 | |
| 13985 | /* Mark it bad. */ |
| 13986 | unmatched_duplicate (existing: maybe_template); |
| 13987 | } |
| 13988 | } |
| 13989 | |
| 13990 | return true; |
| 13991 | } |
| 13992 | |
| 13993 | /* Write out the body of DECL. See above circularity note. */ |
| 13994 | |
| 13995 | void |
| 13996 | trees_out::write_definition (tree decl, bool refs_tu_local) |
| 13997 | { |
| 13998 | auto ovr = make_temp_override (var&: writing_local_entities, |
| 13999 | overrider: writing_local_entities || refs_tu_local); |
| 14000 | |
| 14001 | if (streaming_p ()) |
| 14002 | { |
| 14003 | assert_definition (decl); |
| 14004 | dump () |
| 14005 | && dump ("Writing definition %C:%N" , TREE_CODE (decl), decl); |
| 14006 | } |
| 14007 | else |
| 14008 | dump (dumper::DEPEND) |
| 14009 | && dump ("Depending definition %C:%N" , TREE_CODE (decl), decl); |
| 14010 | |
| 14011 | again: |
| 14012 | switch (TREE_CODE (decl)) |
| 14013 | { |
| 14014 | default: |
| 14015 | gcc_unreachable (); |
| 14016 | |
| 14017 | case TEMPLATE_DECL: |
| 14018 | decl = DECL_TEMPLATE_RESULT (decl); |
| 14019 | goto again; |
| 14020 | |
| 14021 | case FUNCTION_DECL: |
| 14022 | write_function_def (decl); |
| 14023 | break; |
| 14024 | |
| 14025 | case TYPE_DECL: |
| 14026 | { |
| 14027 | tree type = TREE_TYPE (decl); |
| 14028 | gcc_assert (TYPE_MAIN_VARIANT (type) == type |
| 14029 | && TYPE_NAME (type) == decl); |
| 14030 | if (TREE_CODE (type) == ENUMERAL_TYPE) |
| 14031 | write_enum_def (decl); |
| 14032 | else |
| 14033 | write_class_def (defn: decl); |
| 14034 | } |
| 14035 | break; |
| 14036 | |
| 14037 | case VAR_DECL: |
| 14038 | case CONCEPT_DECL: |
| 14039 | write_var_def (decl); |
| 14040 | break; |
| 14041 | } |
| 14042 | } |
| 14043 | |
| 14044 | /* Mark a declaration for by-value walking. If DO_DEFN is true, mark |
| 14045 | its body too. */ |
| 14046 | |
| 14047 | void |
| 14048 | trees_out::mark_declaration (tree decl, bool do_defn) |
| 14049 | { |
| 14050 | mark_by_value (decl); |
| 14051 | |
| 14052 | if (TREE_CODE (decl) == TEMPLATE_DECL) |
| 14053 | decl = DECL_TEMPLATE_RESULT (decl); |
| 14054 | |
| 14055 | if (!do_defn) |
| 14056 | return; |
| 14057 | |
| 14058 | switch (TREE_CODE (decl)) |
| 14059 | { |
| 14060 | default: |
| 14061 | gcc_unreachable (); |
| 14062 | |
| 14063 | case FUNCTION_DECL: |
| 14064 | mark_function_def (decl); |
| 14065 | break; |
| 14066 | |
| 14067 | case TYPE_DECL: |
| 14068 | { |
| 14069 | tree type = TREE_TYPE (decl); |
| 14070 | gcc_assert (TYPE_MAIN_VARIANT (type) == type |
| 14071 | && TYPE_NAME (type) == decl); |
| 14072 | if (TREE_CODE (type) == ENUMERAL_TYPE) |
| 14073 | mark_enum_def (decl); |
| 14074 | else |
| 14075 | mark_class_def (defn: decl); |
| 14076 | } |
| 14077 | break; |
| 14078 | |
| 14079 | case VAR_DECL: |
| 14080 | case CONCEPT_DECL: |
| 14081 | mark_var_def (decl); |
| 14082 | break; |
| 14083 | } |
| 14084 | } |
| 14085 | |
| 14086 | /* Read in the body of DECL. See above circularity note. */ |
| 14087 | |
| 14088 | bool |
| 14089 | trees_in::read_definition (tree decl) |
| 14090 | { |
| 14091 | dump () && dump ("Reading definition %C %N" , TREE_CODE (decl), decl); |
| 14092 | |
| 14093 | tree maybe_template = decl; |
| 14094 | |
| 14095 | again: |
| 14096 | switch (TREE_CODE (decl)) |
| 14097 | { |
| 14098 | default: |
| 14099 | break; |
| 14100 | |
| 14101 | case TEMPLATE_DECL: |
| 14102 | decl = DECL_TEMPLATE_RESULT (decl); |
| 14103 | goto again; |
| 14104 | |
| 14105 | case FUNCTION_DECL: |
| 14106 | return read_function_def (decl, maybe_template); |
| 14107 | |
| 14108 | case TYPE_DECL: |
| 14109 | { |
| 14110 | tree type = TREE_TYPE (decl); |
| 14111 | gcc_assert (TYPE_MAIN_VARIANT (type) == type |
| 14112 | && TYPE_NAME (type) == decl); |
| 14113 | if (TREE_CODE (type) == ENUMERAL_TYPE) |
| 14114 | return read_enum_def (defn: decl, maybe_template); |
| 14115 | else |
| 14116 | return read_class_def (defn: decl, maybe_template); |
| 14117 | } |
| 14118 | break; |
| 14119 | |
| 14120 | case VAR_DECL: |
| 14121 | case CONCEPT_DECL: |
| 14122 | return read_var_def (decl, maybe_template); |
| 14123 | } |
| 14124 | |
| 14125 | return false; |
| 14126 | } |
| 14127 | |
| 14128 | /* Lookup an maybe insert a slot for depset for KEY. */ |
| 14129 | |
| 14130 | depset ** |
| 14131 | depset::hash::entity_slot (tree entity, bool insert) |
| 14132 | { |
| 14133 | traits::compare_type key (entity, NULL); |
| 14134 | depset **slot = find_slot_with_hash (comparable: key, hash: traits::hash (p: key), |
| 14135 | insert: insert ? INSERT : NO_INSERT); |
| 14136 | |
| 14137 | return slot; |
| 14138 | } |
| 14139 | |
| 14140 | depset ** |
| 14141 | depset::hash::binding_slot (tree ctx, tree name, bool insert) |
| 14142 | { |
| 14143 | traits::compare_type key (ctx, name); |
| 14144 | depset **slot = find_slot_with_hash (comparable: key, hash: traits::hash (p: key), |
| 14145 | insert: insert ? INSERT : NO_INSERT); |
| 14146 | |
| 14147 | return slot; |
| 14148 | } |
| 14149 | |
| 14150 | depset * |
| 14151 | depset::hash::find_dependency (tree decl) |
| 14152 | { |
| 14153 | depset **slot = entity_slot (entity: decl, insert: false); |
| 14154 | |
| 14155 | return slot ? *slot : NULL; |
| 14156 | } |
| 14157 | |
| 14158 | depset * |
| 14159 | depset::hash::find_binding (tree ctx, tree name) |
| 14160 | { |
| 14161 | depset **slot = binding_slot (ctx, name, insert: false); |
| 14162 | |
| 14163 | return slot ? *slot : NULL; |
| 14164 | } |
| 14165 | |
| 14166 | static bool is_tu_local_entity (tree decl, bool explain = false); |
| 14167 | static bool is_tu_local_value (tree decl, tree expr, bool explain = false); |
| 14168 | static bool has_tu_local_tmpl_arg (tree decl, tree args, bool explain); |
| 14169 | |
| 14170 | /* Returns true if DECL is a TU-local entity, as defined by [basic.link]. |
| 14171 | If EXPLAIN is true, emit an informative note about why DECL is TU-local. */ |
| 14172 | |
| 14173 | static bool |
| 14174 | is_tu_local_entity (tree decl, bool explain/*=false*/) |
| 14175 | { |
| 14176 | gcc_checking_assert (DECL_P (decl)); |
| 14177 | location_t loc = DECL_SOURCE_LOCATION (decl); |
| 14178 | tree type = TREE_TYPE (decl); |
| 14179 | |
| 14180 | /* Only types, functions, variables, and template (specialisations) |
| 14181 | can be TU-local. */ |
| 14182 | if (TREE_CODE (decl) != TYPE_DECL |
| 14183 | && TREE_CODE (decl) != FUNCTION_DECL |
| 14184 | && TREE_CODE (decl) != VAR_DECL |
| 14185 | && TREE_CODE (decl) != TEMPLATE_DECL) |
| 14186 | return false; |
| 14187 | |
| 14188 | /* An explicit type alias is not an entity; we don't want to stream |
| 14189 | such aliases if they refer to TU-local entities, so propagate this |
| 14190 | from the original type. The built-in declarations of 'int' and such |
| 14191 | are never TU-local. */ |
| 14192 | if (TREE_CODE (decl) == TYPE_DECL |
| 14193 | && !DECL_SELF_REFERENCE_P (decl) |
| 14194 | && !DECL_IMPLICIT_TYPEDEF_P (decl)) |
| 14195 | { |
| 14196 | tree orig = DECL_ORIGINAL_TYPE (decl); |
| 14197 | if (orig && TYPE_NAME (orig)) |
| 14198 | { |
| 14199 | if (explain) |
| 14200 | inform (loc, "%qD is an alias of TU-local type %qT" , decl, orig); |
| 14201 | return is_tu_local_entity (TYPE_NAME (orig), explain); |
| 14202 | } |
| 14203 | else |
| 14204 | return false; |
| 14205 | } |
| 14206 | |
| 14207 | /* Check specializations first for slightly better explanations. */ |
| 14208 | int use_tpl = -1; |
| 14209 | tree ti = node_template_info (decl, use&: use_tpl); |
| 14210 | if (use_tpl > 0 && TREE_CODE (TI_TEMPLATE (ti)) == TEMPLATE_DECL) |
| 14211 | { |
| 14212 | /* A specialization of a TU-local template. */ |
| 14213 | tree tmpl = TI_TEMPLATE (ti); |
| 14214 | if (is_tu_local_entity (decl: tmpl)) |
| 14215 | { |
| 14216 | if (explain) |
| 14217 | { |
| 14218 | inform (loc, "%qD is a specialization of TU-local template %qD" , |
| 14219 | decl, tmpl); |
| 14220 | is_tu_local_entity (decl: tmpl, /*explain=*/true); |
| 14221 | } |
| 14222 | return true; |
| 14223 | } |
| 14224 | |
| 14225 | /* A specialization of a template with any TU-local template argument. */ |
| 14226 | if (has_tu_local_tmpl_arg (decl, TI_ARGS (ti), explain)) |
| 14227 | return true; |
| 14228 | |
| 14229 | /* FIXME A specialization of a template whose (possibly instantiated) |
| 14230 | declaration is an exposure. This should always be covered by the |
| 14231 | above cases?? */ |
| 14232 | } |
| 14233 | |
| 14234 | /* A type, function, variable, or template with internal linkage. */ |
| 14235 | linkage_kind kind = decl_linkage (decl); |
| 14236 | if (kind == lk_internal |
| 14237 | /* But although weakrefs are marked static, don't consider them |
| 14238 | to be TU-local. */ |
| 14239 | && !lookup_attribute (attr_name: "weakref" , DECL_ATTRIBUTES (decl))) |
| 14240 | { |
| 14241 | if (explain) |
| 14242 | inform (loc, "%qD declared with internal linkage" , decl); |
| 14243 | return true; |
| 14244 | } |
| 14245 | |
| 14246 | /* Does not have a name with linkage and is declared, or introduced by a |
| 14247 | lambda-expression, within the definition of a TU-local entity. */ |
| 14248 | if (kind == lk_none) |
| 14249 | { |
| 14250 | tree ctx = CP_DECL_CONTEXT (decl); |
| 14251 | if (LAMBDA_TYPE_P (type)) |
| 14252 | if (tree = LAMBDA_TYPE_EXTRA_SCOPE (type)) |
| 14253 | ctx = extra; |
| 14254 | |
| 14255 | if (TREE_CODE (ctx) == NAMESPACE_DECL) |
| 14256 | { |
| 14257 | if (!TREE_PUBLIC (ctx)) |
| 14258 | { |
| 14259 | if (explain) |
| 14260 | inform (loc, "%qD has no linkage and is declared in an " |
| 14261 | "anonymous namespace" , decl); |
| 14262 | return true; |
| 14263 | } |
| 14264 | } |
| 14265 | else if (TYPE_P (ctx)) |
| 14266 | { |
| 14267 | tree ctx_decl = TYPE_MAIN_DECL (ctx); |
| 14268 | if (is_tu_local_entity (decl: ctx_decl)) |
| 14269 | { |
| 14270 | if (explain) |
| 14271 | { |
| 14272 | inform (loc, "%qD has no linkage and is declared within " |
| 14273 | "TU-local entity %qT" , decl, ctx); |
| 14274 | is_tu_local_entity (decl: ctx_decl, /*explain=*/true); |
| 14275 | } |
| 14276 | return true; |
| 14277 | } |
| 14278 | } |
| 14279 | else if (is_tu_local_entity (decl: ctx)) |
| 14280 | { |
| 14281 | if (explain) |
| 14282 | { |
| 14283 | inform (loc, "%qD has no linkage and is declared within " |
| 14284 | "TU-local entity %qD" , decl, ctx); |
| 14285 | is_tu_local_entity (decl: ctx, /*explain=*/true); |
| 14286 | } |
| 14287 | return true; |
| 14288 | } |
| 14289 | } |
| 14290 | |
| 14291 | /* A type with no name that is defined outside a class-specifier, function |
| 14292 | body, or initializer; or is introduced by a defining-type-specifier that |
| 14293 | is used to declare only TU-local entities. |
| 14294 | |
| 14295 | We consider types with names for linkage purposes as having names, since |
| 14296 | these aren't really TU-local. */ |
| 14297 | tree inner = STRIP_TEMPLATE (decl); |
| 14298 | if (inner |
| 14299 | && TREE_CODE (inner) == TYPE_DECL |
| 14300 | && TYPE_ANON_P (type) |
| 14301 | && !DECL_SELF_REFERENCE_P (inner) |
| 14302 | /* An enum with an enumerator name for linkage. */ |
| 14303 | && !(UNSCOPED_ENUM_P (type) && TYPE_VALUES (type))) |
| 14304 | { |
| 14305 | tree main_decl = TYPE_MAIN_DECL (type); |
| 14306 | if (LAMBDA_TYPE_P (type)) |
| 14307 | { |
| 14308 | /* A lambda expression is, in practice, TU-local iff it has no |
| 14309 | mangling scope. This currently doesn't line up exactly with |
| 14310 | the standard's definition due to some ABI issues, but it's |
| 14311 | pretty close, and avoids other issues down the line. */ |
| 14312 | if (!LAMBDA_TYPE_EXTRA_SCOPE (type)) |
| 14313 | { |
| 14314 | if (explain) |
| 14315 | inform (loc, "%qT has no name and cannot be differentiated " |
| 14316 | "from similar lambdas in other TUs" , type); |
| 14317 | return true; |
| 14318 | } |
| 14319 | } |
| 14320 | else if (!DECL_CLASS_SCOPE_P (main_decl) |
| 14321 | && !decl_function_context (main_decl)) |
| 14322 | { |
| 14323 | if (explain) |
| 14324 | inform (loc, "%qT has no name and is not defined within a class, " |
| 14325 | "function, or initializer" , type); |
| 14326 | return true; |
| 14327 | } |
| 14328 | |
| 14329 | // FIXME introduced by a defining-type-specifier only declaring TU-local |
| 14330 | // entities; does this refer to e.g. 'static struct {} a;"? I can't |
| 14331 | // think of any cases where this isn't covered by earlier cases. */ |
| 14332 | } |
| 14333 | |
| 14334 | return false; |
| 14335 | } |
| 14336 | |
| 14337 | /* Helper for is_tu_local_entity. Returns true if one of the ARGS of |
| 14338 | DECL is TU-local. Emits an explanation if EXPLAIN is true. */ |
| 14339 | |
| 14340 | static bool |
| 14341 | has_tu_local_tmpl_arg (tree decl, tree args, bool explain) |
| 14342 | { |
| 14343 | if (!args || TREE_CODE (args) != TREE_VEC) |
| 14344 | return false; |
| 14345 | |
| 14346 | for (tree a : tree_vec_range (args)) |
| 14347 | { |
| 14348 | if (TREE_CODE (a) == TREE_VEC) |
| 14349 | { |
| 14350 | if (has_tu_local_tmpl_arg (decl, args: a, explain)) |
| 14351 | return true; |
| 14352 | } |
| 14353 | else if (!WILDCARD_TYPE_P (a)) |
| 14354 | { |
| 14355 | if (DECL_P (a) && is_tu_local_entity (decl: a)) |
| 14356 | { |
| 14357 | if (explain) |
| 14358 | { |
| 14359 | inform (DECL_SOURCE_LOCATION (decl), |
| 14360 | "%qD has TU-local template argument %qD" , |
| 14361 | decl, a); |
| 14362 | is_tu_local_entity (decl: a, /*explain=*/true); |
| 14363 | } |
| 14364 | return true; |
| 14365 | } |
| 14366 | |
| 14367 | if (TYPE_P (a) && TYPE_NAME (a) && is_tu_local_entity (TYPE_NAME (a))) |
| 14368 | { |
| 14369 | if (explain) |
| 14370 | { |
| 14371 | inform (DECL_SOURCE_LOCATION (decl), |
| 14372 | "%qD has TU-local template argument %qT" , |
| 14373 | decl, a); |
| 14374 | is_tu_local_entity (TYPE_NAME (a), /*explain=*/true); |
| 14375 | } |
| 14376 | return true; |
| 14377 | } |
| 14378 | |
| 14379 | if (EXPR_P (a) && is_tu_local_value (decl, expr: a, explain)) |
| 14380 | return true; |
| 14381 | } |
| 14382 | } |
| 14383 | |
| 14384 | return false; |
| 14385 | } |
| 14386 | |
| 14387 | /* Returns true if EXPR (part of the initializer for DECL) is a TU-local value |
| 14388 | or object. Emits an explanation if EXPLAIN is true. */ |
| 14389 | |
| 14390 | static bool |
| 14391 | is_tu_local_value (tree decl, tree expr, bool explain/*=false*/) |
| 14392 | { |
| 14393 | if (!expr) |
| 14394 | return false; |
| 14395 | |
| 14396 | tree e = expr; |
| 14397 | STRIP_ANY_LOCATION_WRAPPER (e); |
| 14398 | STRIP_NOPS (e); |
| 14399 | if (TREE_CODE (e) == TARGET_EXPR) |
| 14400 | e = TARGET_EXPR_INITIAL (e); |
| 14401 | if (!e) |
| 14402 | return false; |
| 14403 | |
| 14404 | /* It is, or is a pointer to, a TU-local function or the object associated |
| 14405 | with a TU-local variable. */ |
| 14406 | tree object = NULL_TREE; |
| 14407 | if (TREE_CODE (e) == ADDR_EXPR) |
| 14408 | object = TREE_OPERAND (e, 0); |
| 14409 | else if (TREE_CODE (e) == PTRMEM_CST) |
| 14410 | object = PTRMEM_CST_MEMBER (e); |
| 14411 | else if (VAR_OR_FUNCTION_DECL_P (e)) |
| 14412 | object = e; |
| 14413 | |
| 14414 | if (object |
| 14415 | && VAR_OR_FUNCTION_DECL_P (object) |
| 14416 | && is_tu_local_entity (decl: object)) |
| 14417 | { |
| 14418 | if (explain) |
| 14419 | { |
| 14420 | /* We've lost a lot of location information by the time we get here, |
| 14421 | so let's just do our best effort. */ |
| 14422 | auto loc = cp_expr_loc_or_loc (t: expr, DECL_SOURCE_LOCATION (decl)); |
| 14423 | if (VAR_P (object)) |
| 14424 | inform (loc, "%qD refers to TU-local object %qD" , decl, object); |
| 14425 | else |
| 14426 | inform (loc, "%qD refers to TU-local function %qD" , decl, object); |
| 14427 | is_tu_local_entity (decl: object, explain: true); |
| 14428 | } |
| 14429 | return true; |
| 14430 | } |
| 14431 | |
| 14432 | /* It is an object of class or array type and any of its subobjects or |
| 14433 | any of the objects or functions to which its non-static data members |
| 14434 | of reference type refer is TU-local and is usable in constant |
| 14435 | expressions. */ |
| 14436 | if (TREE_CODE (e) == CONSTRUCTOR && AGGREGATE_TYPE_P (TREE_TYPE (e))) |
| 14437 | for (auto &f : CONSTRUCTOR_ELTS (e)) |
| 14438 | if (is_tu_local_value (decl, expr: f.value, explain)) |
| 14439 | return true; |
| 14440 | |
| 14441 | return false; |
| 14442 | } |
| 14443 | |
| 14444 | /* Complains if DECL is a TU-local entity imported from a named module. |
| 14445 | Returns TRUE if instantiation should fail. */ |
| 14446 | |
| 14447 | bool |
| 14448 | instantiating_tu_local_entity (tree decl) |
| 14449 | { |
| 14450 | if (!modules_p ()) |
| 14451 | return false; |
| 14452 | |
| 14453 | if (TREE_CODE (decl) == TU_LOCAL_ENTITY) |
| 14454 | { |
| 14455 | auto_diagnostic_group d; |
| 14456 | error ("instantiation exposes TU-local entity %qD" , |
| 14457 | TU_LOCAL_ENTITY_NAME (decl)); |
| 14458 | inform (TU_LOCAL_ENTITY_LOCATION (decl), "declared here" ); |
| 14459 | return true; |
| 14460 | } |
| 14461 | |
| 14462 | /* Currently, only TU-local variables and functions, or possibly |
| 14463 | templates thereof, will be emitted from named modules. */ |
| 14464 | tree inner = STRIP_TEMPLATE (decl); |
| 14465 | if (!VAR_OR_FUNCTION_DECL_P (inner)) |
| 14466 | return false; |
| 14467 | |
| 14468 | /* From this point we will only be emitting warnings; if we're not |
| 14469 | warning about this case then there's no need to check further. */ |
| 14470 | if (!warn_expose_global_module_tu_local |
| 14471 | || !warning_enabled_at (DECL_SOURCE_LOCATION (decl), |
| 14472 | opt_id: OPT_Wexpose_global_module_tu_local)) |
| 14473 | return false; |
| 14474 | |
| 14475 | if (!is_tu_local_entity (decl)) |
| 14476 | return false; |
| 14477 | |
| 14478 | if (!DECL_LANG_SPECIFIC (inner) |
| 14479 | || !DECL_MODULE_IMPORT_P (inner)) |
| 14480 | return false; |
| 14481 | |
| 14482 | /* Referencing TU-local entities from a header is generally OK. |
| 14483 | We don't have an easy way to detect if this declaration came |
| 14484 | from a header via a separate named module, but we can just |
| 14485 | ignore that case for warning purposes. */ |
| 14486 | unsigned index = import_entity_index (decl); |
| 14487 | module_state *mod = import_entity_module (index); |
| 14488 | if (mod->is_header ()) |
| 14489 | return false; |
| 14490 | |
| 14491 | auto_diagnostic_group d; |
| 14492 | warning (OPT_Wexpose_global_module_tu_local, |
| 14493 | "instantiation exposes TU-local entity %qD" , decl); |
| 14494 | inform (DECL_SOURCE_LOCATION (decl), "declared here" ); |
| 14495 | |
| 14496 | /* We treat TU-local entities from the GMF as not actually being |
| 14497 | TU-local as an extension, so allow instantation to proceed. */ |
| 14498 | return false; |
| 14499 | } |
| 14500 | |
| 14501 | /* DECL is a newly discovered dependency. Create the depset, if it |
| 14502 | doesn't already exist. Add it to the worklist if so. |
| 14503 | |
| 14504 | DECL will be an OVL_USING_P OVERLOAD, if it's from a binding that's |
| 14505 | a using decl. |
| 14506 | |
| 14507 | We do not have to worry about adding the same dependency more than |
| 14508 | once. First it's harmless, but secondly the TREE_VISITED marking |
| 14509 | prevents us wanting to do it anyway. */ |
| 14510 | |
| 14511 | depset * |
| 14512 | depset::hash::make_dependency (tree decl, entity_kind ek) |
| 14513 | { |
| 14514 | /* Make sure we're being told consistent information. */ |
| 14515 | gcc_checking_assert ((ek == EK_NAMESPACE) |
| 14516 | == (TREE_CODE (decl) == NAMESPACE_DECL |
| 14517 | && !DECL_NAMESPACE_ALIAS (decl))); |
| 14518 | gcc_checking_assert (ek != EK_BINDING && ek != EK_REDIRECT); |
| 14519 | gcc_checking_assert (TREE_CODE (decl) != FIELD_DECL |
| 14520 | && (TREE_CODE (decl) != USING_DECL |
| 14521 | || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)); |
| 14522 | gcc_checking_assert (!is_key_order ()); |
| 14523 | if (ek == EK_USING) |
| 14524 | gcc_checking_assert (TREE_CODE (decl) == OVERLOAD); |
| 14525 | if (ek == EK_TU_LOCAL) |
| 14526 | gcc_checking_assert (DECL_DECLARES_FUNCTION_P (decl)); |
| 14527 | |
| 14528 | if (TREE_CODE (decl) == TEMPLATE_DECL) |
| 14529 | /* The template should have copied these from its result decl. */ |
| 14530 | gcc_checking_assert (DECL_MODULE_EXPORT_P (decl) |
| 14531 | == DECL_MODULE_EXPORT_P (DECL_TEMPLATE_RESULT (decl))); |
| 14532 | |
| 14533 | depset **slot = entity_slot (entity: decl, insert: true); |
| 14534 | depset *dep = *slot; |
| 14535 | bool for_binding = ek == EK_FOR_BINDING; |
| 14536 | |
| 14537 | if (!dep) |
| 14538 | { |
| 14539 | if ((DECL_IMPLICIT_TYPEDEF_P (decl) |
| 14540 | /* ... not an enum, for instance. */ |
| 14541 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)) |
| 14542 | && TYPE_LANG_SPECIFIC (TREE_TYPE (decl)) |
| 14543 | && CLASSTYPE_USE_TEMPLATE (TREE_TYPE (decl)) == 2) |
| 14544 | || (VAR_P (decl) |
| 14545 | && DECL_LANG_SPECIFIC (decl) |
| 14546 | && DECL_USE_TEMPLATE (decl) == 2)) |
| 14547 | { |
| 14548 | /* A partial or explicit specialization. Partial |
| 14549 | specializations might not be in the hash table, because |
| 14550 | there can be multiple differently-constrained variants. |
| 14551 | |
| 14552 | template<typename T> class silly; |
| 14553 | template<typename T> requires true class silly {}; |
| 14554 | |
| 14555 | We need to find them, insert their TEMPLATE_DECL in the |
| 14556 | dep_hash, and then convert the dep we just found into a |
| 14557 | redirect. */ |
| 14558 | |
| 14559 | tree ti = get_template_info (decl); |
| 14560 | tree tmpl = TI_TEMPLATE (ti); |
| 14561 | tree partial = NULL_TREE; |
| 14562 | for (tree spec = DECL_TEMPLATE_SPECIALIZATIONS (tmpl); |
| 14563 | spec; spec = TREE_CHAIN (spec)) |
| 14564 | if (DECL_TEMPLATE_RESULT (TREE_VALUE (spec)) == decl) |
| 14565 | { |
| 14566 | partial = TREE_VALUE (spec); |
| 14567 | break; |
| 14568 | } |
| 14569 | |
| 14570 | if (partial) |
| 14571 | { |
| 14572 | /* Eagerly create an empty redirect. The following |
| 14573 | make_dependency call could cause hash reallocation, |
| 14574 | and invalidate slot's value. */ |
| 14575 | depset *redirect = make_entity (entity: decl, ek: EK_REDIRECT); |
| 14576 | |
| 14577 | /* Redirects are never reached -- always snap to their target. */ |
| 14578 | redirect->set_flag_bit<DB_UNREACHED_BIT> (); |
| 14579 | |
| 14580 | *slot = redirect; |
| 14581 | |
| 14582 | depset *tmpl_dep = make_dependency (decl: partial, ek: EK_PARTIAL); |
| 14583 | gcc_checking_assert (tmpl_dep->get_entity_kind () == EK_PARTIAL); |
| 14584 | |
| 14585 | redirect->deps.safe_push (obj: tmpl_dep); |
| 14586 | |
| 14587 | return redirect; |
| 14588 | } |
| 14589 | } |
| 14590 | |
| 14591 | bool has_def = ek != EK_USING && has_definition (decl); |
| 14592 | if (ek > EK_BINDING) |
| 14593 | ek = EK_DECL; |
| 14594 | |
| 14595 | /* The only OVERLOADS we should see are USING decls from |
| 14596 | bindings. */ |
| 14597 | *slot = dep = make_entity (entity: decl, ek, is_defn: has_def); |
| 14598 | |
| 14599 | if (CHECKING_P && TREE_CODE (decl) == TEMPLATE_DECL) |
| 14600 | /* The template_result should otherwise not be in the |
| 14601 | table, or be an empty redirect (created above). */ |
| 14602 | if (auto *eslot = entity_slot (DECL_TEMPLATE_RESULT (decl), insert: false)) |
| 14603 | gcc_checking_assert ((*eslot)->get_entity_kind () == EK_REDIRECT |
| 14604 | && !(*eslot)->deps.length ()); |
| 14605 | |
| 14606 | if (ignore_exposure) |
| 14607 | dep->set_flag_bit<DB_IGNORED_EXPOSURE_BIT> (); |
| 14608 | |
| 14609 | if (ek != EK_USING) |
| 14610 | { |
| 14611 | tree not_tmpl = STRIP_TEMPLATE (decl); |
| 14612 | bool imported_from_module_p = false; |
| 14613 | |
| 14614 | if (DECL_LANG_SPECIFIC (not_tmpl) |
| 14615 | && DECL_MODULE_IMPORT_P (not_tmpl)) |
| 14616 | { |
| 14617 | /* Store the module number and index in cluster/section, |
| 14618 | so we don't have to look them up again. */ |
| 14619 | unsigned index = import_entity_index (decl); |
| 14620 | module_state *from = import_entity_module (index); |
| 14621 | /* Remap will be zero for imports from partitions, which |
| 14622 | we want to treat as-if declared in this TU. */ |
| 14623 | if (from->remap) |
| 14624 | { |
| 14625 | dep->cluster = index - from->entity_lwm; |
| 14626 | dep->section = from->remap; |
| 14627 | dep->set_flag_bit<DB_IMPORTED_BIT> (); |
| 14628 | |
| 14629 | if (!from->is_header ()) |
| 14630 | imported_from_module_p = true; |
| 14631 | } |
| 14632 | } |
| 14633 | |
| 14634 | /* Check for TU-local entities. This is unnecessary in header |
| 14635 | units because we can export internal-linkage decls, and |
| 14636 | no declarations are exposures. Similarly, if the decl was |
| 14637 | imported from a non-header module we know it cannot have |
| 14638 | been TU-local. */ |
| 14639 | if (!header_module_p () && !imported_from_module_p) |
| 14640 | { |
| 14641 | if (is_tu_local_entity (decl)) |
| 14642 | dep->set_flag_bit<DB_TU_LOCAL_BIT> (); |
| 14643 | |
| 14644 | if (VAR_P (decl) |
| 14645 | && decl_maybe_constant_var_p (decl) |
| 14646 | && is_tu_local_value (decl, DECL_INITIAL (decl))) |
| 14647 | { |
| 14648 | /* A potentially-constant variable initialized to a TU-local |
| 14649 | value is not usable in constant expressions within other |
| 14650 | translation units. We can achieve this by simply not |
| 14651 | streaming the definition in such cases. */ |
| 14652 | dep->clear_flag_bit<DB_DEFN_BIT> (); |
| 14653 | |
| 14654 | if (DECL_DECLARED_CONSTEXPR_P (decl) |
| 14655 | || DECL_INLINE_VAR_P (decl)) |
| 14656 | /* A constexpr variable initialized to a TU-local value, |
| 14657 | or an inline value (PR c++/119996), is an exposure. |
| 14658 | |
| 14659 | For simplicity, we don't support "non-strict" TU-local |
| 14660 | values: even if the TU-local entity we refer to in the |
| 14661 | initialiser is in the GMF, we still won't consider this |
| 14662 | valid in constant expressions in other TUs, and so |
| 14663 | complain accordingly. */ |
| 14664 | dep->set_flag_bit<DB_EXPOSE_PURVIEW_BIT> (); |
| 14665 | } |
| 14666 | } |
| 14667 | |
| 14668 | /* A namespace-scope type may be declared in one module unit |
| 14669 | and defined in another; make sure that we're found when |
| 14670 | completing the class. */ |
| 14671 | if (ek == EK_DECL |
| 14672 | && !dep->is_import () |
| 14673 | && dep->has_defn () |
| 14674 | && DECL_NAMESPACE_SCOPE_P (not_tmpl) |
| 14675 | && DECL_IMPLICIT_TYPEDEF_P (not_tmpl) |
| 14676 | /* Anonymous types can't be forward-declared. */ |
| 14677 | && !IDENTIFIER_ANON_P (DECL_NAME (not_tmpl))) |
| 14678 | dep->set_flag_bit<DB_IS_PENDING_BIT> (); |
| 14679 | |
| 14680 | /* Namespace-scope functions can be found by ADL by template |
| 14681 | instantiations in this module. We need to create bindings |
| 14682 | for them so that name lookup recognises they exist, if they |
| 14683 | won't be discarded. add_binding_entity is too early to do |
| 14684 | this for GM functions, because if nobody ends up using them |
| 14685 | we'll have leftover bindings laying around, and it's tricky |
| 14686 | to delete them and any namespaces they've implicitly created |
| 14687 | deps on. The downside is this means we don't pick up on |
| 14688 | using-decls, but by [module.global.frag] p3.6 we don't have |
| 14689 | to. */ |
| 14690 | if (ek == EK_DECL |
| 14691 | && !for_binding |
| 14692 | && !dep->is_import () |
| 14693 | && !dep->is_tu_local () |
| 14694 | && DECL_NAMESPACE_SCOPE_P (decl) |
| 14695 | && DECL_DECLARES_FUNCTION_P (decl) |
| 14696 | /* Compiler-generated functions won't participate in ADL. */ |
| 14697 | && !DECL_ARTIFICIAL (decl) |
| 14698 | /* A hidden friend doesn't need a binding. */ |
| 14699 | && !(DECL_LANG_SPECIFIC (not_tmpl) |
| 14700 | && DECL_UNIQUE_FRIEND_P (not_tmpl))) |
| 14701 | { |
| 14702 | /* This will only affect GM functions. */ |
| 14703 | gcc_checking_assert (!DECL_LANG_SPECIFIC (not_tmpl) |
| 14704 | || !DECL_MODULE_PURVIEW_P (not_tmpl)); |
| 14705 | /* We shouldn't see any instantiations or specialisations. */ |
| 14706 | gcc_checking_assert (!DECL_LANG_SPECIFIC (decl) |
| 14707 | || !DECL_USE_TEMPLATE (decl)); |
| 14708 | |
| 14709 | tree ns = CP_DECL_CONTEXT (decl); |
| 14710 | tree name = DECL_NAME (decl); |
| 14711 | depset *binding = find_binding (ctx: ns, name); |
| 14712 | if (!binding) |
| 14713 | { |
| 14714 | binding = make_binding (ns, name); |
| 14715 | add_namespace_context (binding, ns); |
| 14716 | |
| 14717 | depset **slot = binding_slot (ctx: ns, name, /*insert=*/true); |
| 14718 | *slot = binding; |
| 14719 | } |
| 14720 | |
| 14721 | binding->deps.safe_push (obj: dep); |
| 14722 | dep->deps.safe_push (obj: binding); |
| 14723 | dump (dumper::DEPEND) |
| 14724 | && dump ("Built ADL binding for %C:%N" , |
| 14725 | TREE_CODE (decl), decl); |
| 14726 | } |
| 14727 | } |
| 14728 | |
| 14729 | if (!dep->is_import ()) |
| 14730 | worklist.safe_push (obj: dep); |
| 14731 | } |
| 14732 | else if (!ignore_exposure) |
| 14733 | dep->clear_flag_bit<DB_IGNORED_EXPOSURE_BIT> (); |
| 14734 | |
| 14735 | dump (dumper::DEPEND) |
| 14736 | && dump ("%s on %s %C:%N found" , |
| 14737 | ek == EK_REDIRECT ? "Redirect" |
| 14738 | : (for_binding || ek == EK_TU_LOCAL) ? "Binding" |
| 14739 | : "Dependency" , |
| 14740 | dep->entity_kind_name (), TREE_CODE (decl), decl); |
| 14741 | |
| 14742 | return dep; |
| 14743 | } |
| 14744 | |
| 14745 | /* Whether REF is an exposure of a member type of SOURCE. |
| 14746 | |
| 14747 | This comes up with exposures of class-scope lambdas, that we currently |
| 14748 | treat as TU-local due to ABI reasons. In such a case the type of the |
| 14749 | lambda will be exposed in two places, first by the class type it is in |
| 14750 | the TYPE_FIELDS list of, and second by the actual member declaring that |
| 14751 | lambda. We only want the second case to warn. */ |
| 14752 | |
| 14753 | static bool |
| 14754 | is_exposure_of_member_type (depset *source, depset *ref) |
| 14755 | { |
| 14756 | gcc_checking_assert (source->refs_tu_local (/*strict=*/true) |
| 14757 | && ref->is_tu_local (/*strict=*/true)); |
| 14758 | tree source_entity = STRIP_TEMPLATE (source->get_entity ()); |
| 14759 | tree ref_entity = STRIP_TEMPLATE (ref->get_entity ()); |
| 14760 | |
| 14761 | if (!source->is_tu_local (/*strict=*/true) |
| 14762 | && source_entity |
| 14763 | && ref_entity |
| 14764 | && DECL_IMPLICIT_TYPEDEF_P (source_entity) |
| 14765 | && DECL_IMPLICIT_TYPEDEF_P (ref_entity) |
| 14766 | && DECL_CLASS_SCOPE_P (ref_entity) |
| 14767 | && DECL_CONTEXT (ref_entity) == TREE_TYPE (source_entity)) |
| 14768 | { |
| 14769 | gcc_checking_assert (LAMBDA_TYPE_P (TREE_TYPE (ref_entity))); |
| 14770 | return true; |
| 14771 | } |
| 14772 | else |
| 14773 | return false; |
| 14774 | } |
| 14775 | |
/* DEP is a newly discovered dependency.  Append it to current's
   depset, and update CURRENT's TU-local reference/exposure flags as
   needed.  */

void
depset::hash::add_dependency (depset *dep)
{
  gcc_checking_assert (current && !is_key_order ());
  current->deps.safe_push (obj: dep);

  if (dep->is_tu_local (/*strict=*/true))
    {
      /* Record that CURRENT refers to a TU-local entity; which flag
	 depends on the non-strict TU-localness of DEP.  */
      if (dep->is_tu_local ())
	current->set_flag_bit<DB_REF_PURVIEW_BIT> ();
      else
	current->set_flag_bit<DB_REF_GLOBAL_BIT> ();

      /* Unless suppressed, the reference is also an exposure, except
	 for the member-type special case (see
	 is_exposure_of_member_type).  */
      if (!ignore_exposure && !is_exposure_of_member_type (source: current, ref: dep))
	{
	  if (dep->is_tu_local ())
	    current->set_flag_bit<DB_EXPOSE_PURVIEW_BIT> ();
	  else
	    current->set_flag_bit<DB_EXPOSE_GLOBAL_BIT> ();
	}
    }

  if (current->get_entity_kind () == EK_USING
      && DECL_IMPLICIT_TYPEDEF_P (dep->get_entity ())
      && TREE_CODE (TREE_TYPE (dep->get_entity ())) == ENUMERAL_TYPE)
    {
      /* CURRENT is an unwrapped using-decl and DECL is an enum's
	 implicit typedef.  Is CURRENT a member of the enum?  */
      tree c_decl = OVL_FUNCTION (current->get_entity ());

      if (TREE_CODE (c_decl) == CONST_DECL
	  && (current->deps[0]->get_entity ()
	      == CP_DECL_CONTEXT (dep->get_entity ())))
	/* Make DECL depend on CURRENT.  */
	dep->deps.safe_push (obj: current);
    }

  /* If two dependencies recursively depend on each other existing within
     their own merge keys, we must ensure that the first dep we saw while
     walking is written first in this cluster.  See sort_cluster for more
     details.  */
  if (writing_merge_key)
    {
      dep->set_flag_bit<DB_MAYBE_RECURSIVE_BIT> ();
      if (!current->is_maybe_recursive ())
	current->set_flag_bit<DB_ENTRY_BIT> ();
    }

  if (dep->is_unreached ())
    {
      /* The dependency is reachable now.  */
      reached_unreached = true;
      dep->clear_flag_bit<DB_UNREACHED_BIT> ();
      dump (dumper::DEPEND)
	&& dump ("Reaching unreached %s %C:%N" , dep->entity_kind_name (),
		 TREE_CODE (dep->get_entity ()), dep->get_entity ());
    }
}
| 14837 | |
| 14838 | depset * |
| 14839 | depset::hash::add_dependency (tree decl, entity_kind ek) |
| 14840 | { |
| 14841 | depset *dep; |
| 14842 | |
| 14843 | if (is_key_order ()) |
| 14844 | { |
| 14845 | dep = find_dependency (decl); |
| 14846 | if (dep) |
| 14847 | { |
| 14848 | current->deps.safe_push (obj: dep); |
| 14849 | dump (dumper::MERGE) |
| 14850 | && dump ("Key dependency on %s %C:%N found" , |
| 14851 | dep->entity_kind_name (), TREE_CODE (decl), decl); |
| 14852 | } |
| 14853 | else |
| 14854 | { |
| 14855 | /* It's not a mergeable decl, look for it in the original |
| 14856 | table. */ |
| 14857 | dep = chain->find_dependency (decl); |
| 14858 | gcc_checking_assert (dep); |
| 14859 | } |
| 14860 | } |
| 14861 | else |
| 14862 | { |
| 14863 | dep = make_dependency (decl, ek); |
| 14864 | if (dep->get_entity_kind () != EK_REDIRECT) |
| 14865 | add_dependency (dep); |
| 14866 | } |
| 14867 | |
| 14868 | return dep; |
| 14869 | } |
| 14870 | |
| 14871 | void |
| 14872 | depset::hash::add_namespace_context (depset *dep, tree ns) |
| 14873 | { |
| 14874 | depset *ns_dep = make_dependency (decl: ns, ek: depset::EK_NAMESPACE); |
| 14875 | dep->deps.safe_push (obj: ns_dep); |
| 14876 | |
| 14877 | /* Mark it as special if imported so we don't walk connect when |
| 14878 | SCCing. */ |
| 14879 | if (!dep->is_binding () && ns_dep->is_import ()) |
| 14880 | dep->set_special (); |
| 14881 | } |
| 14882 | |
/* State shared between add_namespace_entities and its
   walk_module_binding callback, add_binding_entity.  */

struct add_binding_data
{
  tree ns;		/* The namespace whose bindings we're walking.  */
  bitmap partitions;	/* Partitions to consider (passed through).  */
  depset *binding;	/* Binding depset for the current name; created
			   lazily on first addition.  */
  depset::hash *hash;	/* The depset table being populated.  */
  bool met_namespace;	/* Whether we already processed the namespace
			   itself (it must be walked exactly once).  */
};
| 14891 | |
/* Callback for walk_module_binding (see add_namespace_entities): DECL
   is an entity bound in the namespace DATA_->ns, FLAGS describes how
   it is bound (using-decl, purview, export, hidden, duplicate).
   Creates binding depsets and dependencies as needed.
   Return true if we are, or contain something that is exported.  */

bool
depset::hash::add_binding_entity (tree decl, WMB_Flags flags, void *data_)
{
  auto data = static_cast <add_binding_data *> (data_);
  decl = strip_using_decl (decl);

  if (!(TREE_CODE (decl) == NAMESPACE_DECL && !DECL_NAMESPACE_ALIAS (decl)))
    {
      tree inner = decl;

      /* INNER is the decl whose purview-ness determines whether we
	 keep this binding.  */
      if (TREE_CODE (inner) == CONST_DECL
	  && TREE_CODE (DECL_CONTEXT (inner)) == ENUMERAL_TYPE
	  /* A using-decl could make a CONST_DECL purview for a non-purview
	     enumeration.  */
	  && (!DECL_LANG_SPECIFIC (inner) || !DECL_MODULE_PURVIEW_P (inner)))
	inner = TYPE_NAME (DECL_CONTEXT (inner));
      else if (TREE_CODE (inner) == TEMPLATE_DECL)
	inner = DECL_TEMPLATE_RESULT (inner);

      if ((!DECL_LANG_SPECIFIC (inner) || !DECL_MODULE_PURVIEW_P (inner))
	  && !((flags & WMB_Using) && (flags & WMB_Purview)))
	/* Ignore entities not within the module purview.  We'll need to
	   create bindings for any non-discarded function calls for ADL,
	   but it's simpler to handle that at the point of use rather
	   than trying to clear out bindings after the fact.  */
	return false;

      bool internal_decl = false;
      if (!header_module_p () && is_tu_local_entity (decl))
	{
	  /* A TU-local entity.  For ADL we still need to create bindings
	     for internal-linkage functions attached to a named module.  */
	  if (DECL_DECLARES_FUNCTION_P (inner)
	      && DECL_LANG_SPECIFIC (inner)
	      && DECL_MODULE_ATTACH_P (inner))
	    {
	      gcc_checking_assert (!DECL_MODULE_EXPORT_P (inner));
	      internal_decl = true;
	    }
	  else
	    return false;
	}

      if ((TREE_CODE (decl) == VAR_DECL
	   || TREE_CODE (decl) == TYPE_DECL)
	  && DECL_TINFO_P (decl))
	/* Ignore TINFO things.  */
	return false;

      if (TREE_CODE (decl) == VAR_DECL && DECL_NTTP_OBJECT_P (decl))
	/* Ignore NTTP objects.  */
	return false;

      if (deduction_guide_p (decl))
	{
	  /* Ignore deduction guides, bindings for them will be created within
	     find_dependencies for their class template.  But still build a dep
	     for them so that we don't discard them.  */
	  data->hash->make_dependency (decl, ek: EK_FOR_BINDING);
	  return false;
	}

      if (!(flags & WMB_Using) && CP_DECL_CONTEXT (decl) != data->ns)
	{
	  /* An unscoped enum constant implicitly brought into the containing
	     namespace.  We treat this like a using-decl.  */
	  gcc_checking_assert (TREE_CODE (decl) == CONST_DECL);

	  flags = WMB_Flags (flags | WMB_Using);
	  if (DECL_MODULE_EXPORT_P (TYPE_NAME (TREE_TYPE (decl)))
	      /* A using-decl can make an enum constant exported for a
		 non-exported enumeration.  */
	      || (DECL_LANG_SPECIFIC (decl) && DECL_MODULE_EXPORT_P (decl)))
	    flags = WMB_Flags (flags | WMB_Export);
	}

      if (!data->binding)
	/* No binding to check.  */;
      else if (flags & WMB_Using)
	{
	  /* Look in the binding to see if we already have this
	     using.  */
	  for (unsigned ix = data->binding->deps.length (); --ix;)
	    {
	      depset *d = data->binding->deps[ix];
	      if (d->get_entity_kind () == EK_USING
		  && OVL_FUNCTION (d->get_entity ()) == decl)
		{
		  if (!(flags & WMB_Hidden))
		    d->clear_hidden_binding ();
		  OVL_PURVIEW_P (d->get_entity ()) = true;
		  if (flags & WMB_Export)
		    OVL_EXPORT_P (d->get_entity ()) = true;
		  return bool (flags & WMB_Export);
		}
	    }
	}
      else if (flags & WMB_Dups)
	{
	  /* Look in the binding to see if we already have this decl.  */
	  for (unsigned ix = data->binding->deps.length (); --ix;)
	    {
	      depset *d = data->binding->deps[ix];
	      if (d->get_entity () == decl)
		{
		  if (!(flags & WMB_Hidden))
		    d->clear_hidden_binding ();
		  return false;
		}
	    }
	}

      /* We're adding something.  Lazily create the binding depset the
	 first time around.  */
      if (!data->binding)
	{
	  data->binding = make_binding (ns: data->ns, DECL_NAME (decl));
	  data->hash->add_namespace_context (dep: data->binding, ns: data->ns);

	  depset **slot = data->hash->binding_slot (ctx: data->ns,
						    DECL_NAME (decl), insert: true);
	  gcc_checking_assert (!*slot);
	  *slot = data->binding;
	}

      /* Make sure nobody left a tree visited lying about.  */
      gcc_checking_assert (!TREE_VISITED (decl));

      /* Wrap a used decl in an OVERLOAD recording the using-decl's
	 purview/export state.  */
      if (flags & WMB_Using)
	{
	  decl = ovl_make (fn: decl, NULL_TREE);
	  OVL_USING_P (decl) = true;
	  OVL_PURVIEW_P (decl) = true;
	  if (flags & WMB_Export)
	    OVL_EXPORT_P (decl) = true;
	}

      entity_kind ek = EK_FOR_BINDING;
      if (internal_decl)
	ek = EK_TU_LOCAL;
      else if (flags & WMB_Using)
	ek = EK_USING;

      depset *dep = data->hash->make_dependency (decl, ek);
      if (flags & WMB_Hidden)
	dep->set_hidden_binding ();
      data->binding->deps.safe_push (obj: dep);
      /* Binding and contents are mutually dependent.  */
      dep->deps.safe_push (obj: data->binding);

      return (flags & WMB_Using
	      ? flags & WMB_Export : DECL_MODULE_EXPORT_P (decl));
    }
  else if (!data->met_namespace)
    {
      /* Namespace, walk exactly once.  */
      data->met_namespace = true;
      if (data->hash->add_namespace_entities (ns: decl, partitions: data->partitions))
	{
	  /* It contains an exported thing, so it is exported.  */
	  gcc_checking_assert (DECL_MODULE_PURVIEW_P (decl));
	  gcc_checking_assert (TREE_PUBLIC (decl) || header_module_p ());
	  DECL_MODULE_EXPORT_P (decl) = true;
	}

      if (DECL_MODULE_PURVIEW_P (decl))
	{
	  data->hash->make_dependency (decl, ek: depset::EK_NAMESPACE);

	  return DECL_MODULE_EXPORT_P (decl);
	}
    }

  return false;
}
| 15068 | |
| 15069 | /* Recursively find all the namespace bindings of NS. Add a depset |
| 15070 | for every binding that contains an export or module-linkage entity. |
| 15071 | Add a defining depset for every such decl that we need to write a |
| 15072 | definition. Such defining depsets depend on the binding depset. |
| 15073 | Returns true if we contain something exported. */ |
| 15074 | |
| 15075 | bool |
| 15076 | depset::hash::add_namespace_entities (tree ns, bitmap partitions) |
| 15077 | { |
| 15078 | dump () && dump ("Looking for writables in %N" , ns); |
| 15079 | dump.indent (); |
| 15080 | |
| 15081 | unsigned count = 0; |
| 15082 | add_binding_data data; |
| 15083 | data.ns = ns; |
| 15084 | data.partitions = partitions; |
| 15085 | data.hash = this; |
| 15086 | |
| 15087 | for (tree binding : *DECL_NAMESPACE_BINDINGS (ns)) |
| 15088 | { |
| 15089 | data.binding = nullptr; |
| 15090 | data.met_namespace = false; |
| 15091 | if (walk_module_binding (binding, partitions, add_binding_entity, data: &data)) |
| 15092 | count++; |
| 15093 | } |
| 15094 | |
| 15095 | /* Seed any using-directives so that we emit the relevant namespaces. */ |
| 15096 | for (tree udir : NAMESPACE_LEVEL (ns)->using_directives) |
| 15097 | if (TREE_CODE (udir) == USING_DECL && DECL_MODULE_PURVIEW_P (udir)) |
| 15098 | { |
| 15099 | make_dependency (USING_DECL_DECLS (udir), ek: depset::EK_NAMESPACE); |
| 15100 | if (DECL_MODULE_EXPORT_P (udir)) |
| 15101 | count++; |
| 15102 | } |
| 15103 | |
| 15104 | if (count) |
| 15105 | dump () && dump ("Found %u entries" , count); |
| 15106 | dump.outdent (); |
| 15107 | |
| 15108 | return count != 0; |
| 15109 | } |
| 15110 | |
| 15111 | void |
| 15112 | depset::hash::add_partial_entities (vec<tree, va_gc> *partial_classes) |
| 15113 | { |
| 15114 | for (unsigned ix = 0; ix != partial_classes->length (); ix++) |
| 15115 | { |
| 15116 | tree inner = (*partial_classes)[ix]; |
| 15117 | |
| 15118 | depset *dep = make_dependency (decl: inner, ek: depset::EK_DECL); |
| 15119 | |
| 15120 | if (dep->get_entity_kind () == depset::EK_REDIRECT) |
| 15121 | { |
| 15122 | dep = dep->deps[0]; |
| 15123 | /* We should have recorded the template as a partial |
| 15124 | specialization. */ |
| 15125 | gcc_checking_assert (dep->get_entity_kind () |
| 15126 | == depset::EK_PARTIAL); |
| 15127 | |
| 15128 | /* Only emit GM entities if reached. */ |
| 15129 | if (!DECL_LANG_SPECIFIC (inner) |
| 15130 | || !DECL_MODULE_PURVIEW_P (inner)) |
| 15131 | dep->set_flag_bit<DB_UNREACHED_BIT> (); |
| 15132 | } |
| 15133 | else |
| 15134 | { |
| 15135 | /* It was an explicit specialization, not a partial one. |
| 15136 | We should have already added this. */ |
| 15137 | gcc_checking_assert (dep->get_entity_kind () |
| 15138 | == depset::EK_SPECIALIZATION); |
| 15139 | gcc_checking_assert (dep->is_special ()); |
| 15140 | } |
| 15141 | } |
| 15142 | } |
| 15143 | |
| 15144 | /* Add the members of imported classes that we defined in this TU. |
| 15145 | This will also include lazily created implicit member function |
| 15146 | declarations. (All others will be definitions.) */ |
| 15147 | |
| 15148 | void |
| 15149 | depset::hash::add_class_entities (vec<tree, va_gc> *class_members) |
| 15150 | { |
| 15151 | for (unsigned ix = 0; ix != class_members->length (); ix++) |
| 15152 | { |
| 15153 | tree defn = (*class_members)[ix]; |
| 15154 | depset *dep = make_dependency (decl: defn, ek: EK_INNER_DECL); |
| 15155 | |
| 15156 | if (dep->get_entity_kind () == EK_REDIRECT) |
| 15157 | dep = dep->deps[0]; |
| 15158 | |
| 15159 | /* Only non-instantiations need marking as pendings. */ |
| 15160 | if (dep->get_entity_kind () == EK_DECL) |
| 15161 | dep->set_flag_bit <DB_IS_PENDING_BIT> (); |
| 15162 | } |
| 15163 | } |
| 15164 | |
/* Add any entities found via dependent ADL.  EXPR is a call or
   potentially-overloaded operator expression seen while walking the
   current (template) depset; record the name and arguments so lookup
   can be performed later.  */

void
depset::hash::add_dependent_adl_entities (tree expr)
{
  gcc_checking_assert (!is_key_order ());
  /* Only templates perform dependent ADL at instantiation time.  */
  if (TREE_CODE (current->get_entity ()) != TEMPLATE_DECL)
    return;

  dep_adl_info info;
  switch (TREE_CODE (expr))
    {
    case CALL_EXPR:
      /* Only unqualified calls subject to ADL are interesting.  */
      if (!KOENIG_LOOKUP_P (expr))
	return;
      info.name = CALL_EXPR_FN (expr);
      if (!info.name)
	return;
      if (TREE_CODE (info.name) == TEMPLATE_ID_EXPR)
	info.name = TREE_OPERAND (info.name, 0);
      if (TREE_CODE (info.name) == TU_LOCAL_ENTITY)
	return;
      if (!identifier_p (t: info.name))
	info.name = OVL_NAME (info.name);
      for (int ix = 0; ix < call_expr_nargs (expr); ix++)
	vec_safe_push (v&: info.args, CALL_EXPR_ARG (expr, ix));
      break;

      /* Relational operators may be satisfied by a rewritten
	 operator<=> candidate; remember to look that up too.  */
    case LE_EXPR:
    case GE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
      info.rewrite = SPACESHIP_EXPR;
      goto overloadable_expr;

      /* operator!= may be satisfied by a rewritten operator==.  */
    case NE_EXPR:
      info.rewrite = EQ_EXPR;
      goto overloadable_expr;

    case EQ_EXPR:
      /* Not strictly a rewrite candidate, but we need to ensure
	 that lookup of a matching NE_EXPR can succeed if that
	 would inhibit a rewrite with reversed parameters.  */
      info.rewrite = NE_EXPR;
      goto overloadable_expr;

      /* Binary operators that can be satisfied by ADL-found
	 overloads.  */
    case COMPOUND_EXPR:
    case MEMBER_REF:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case SPACESHIP_EXPR:
    case BIT_AND_EXPR:
    case BIT_XOR_EXPR:
    case BIT_IOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    overloadable_expr:
      info.name = ovl_op_identifier (TREE_CODE (expr));
      gcc_checking_assert (tree_operand_length (expr) == 2);
      vec_safe_push (v&: info.args, TREE_OPERAND (expr, 0));
      vec_safe_push (v&: info.args, TREE_OPERAND (expr, 1));
      break;

    default:
      return;
    }

  /* If no argument is type-dependent we don't need to do anything
     further: lookup was already performed at parse time, so
     instantiation won't find new entities.  */
  processing_template_decl_sentinel ptds;
  ++processing_template_decl;
  if (!any_type_dependent_arguments_p (info.args))
    return;

  /* We need to defer name lookup until after walking, otherwise
     we get confused by stray TREE_VISITEDs.  */
  dep_adl_entity_list.safe_push (obj: info);
}
| 15248 | |
| 15249 | /* We add the partial & explicit specializations, and the explicit |
| 15250 | instantiations. */ |
| 15251 | |
| 15252 | static void |
| 15253 | specialization_add (bool decl_p, spec_entry *entry, void *data_) |
| 15254 | { |
| 15255 | vec<spec_entry *> *data = reinterpret_cast <vec<spec_entry *> *> (data_); |
| 15256 | |
| 15257 | if (!decl_p) |
| 15258 | { |
| 15259 | /* We exclusively use decls to locate things. Make sure there's |
| 15260 | no mismatch between the two specialization tables we keep. |
| 15261 | pt.cc optimizes instantiation lookup using a complicated |
| 15262 | heuristic. We don't attempt to replicate that algorithm, but |
| 15263 | observe its behaviour and reproduce it upon read back. */ |
| 15264 | |
| 15265 | gcc_checking_assert (TREE_CODE (entry->spec) == ENUMERAL_TYPE |
| 15266 | || DECL_CLASS_TEMPLATE_P (entry->tmpl)); |
| 15267 | |
| 15268 | gcc_checking_assert (!match_mergeable_specialization (true, entry)); |
| 15269 | } |
| 15270 | else if (VAR_OR_FUNCTION_DECL_P (entry->spec)) |
| 15271 | gcc_checking_assert (!DECL_LOCAL_DECL_P (entry->spec)); |
| 15272 | |
| 15273 | data->safe_push (obj: entry); |
| 15274 | } |
| 15275 | |
| 15276 | /* Arbitrary stable comparison. */ |
| 15277 | |
| 15278 | static int |
| 15279 | specialization_cmp (const void *a_, const void *b_) |
| 15280 | { |
| 15281 | const spec_entry *ea = *reinterpret_cast<const spec_entry *const *> (a_); |
| 15282 | const spec_entry *eb = *reinterpret_cast<const spec_entry *const *> (b_); |
| 15283 | |
| 15284 | if (ea == eb) |
| 15285 | return 0; |
| 15286 | |
| 15287 | tree a = ea->spec; |
| 15288 | tree b = eb->spec; |
| 15289 | if (TYPE_P (a)) |
| 15290 | { |
| 15291 | a = TYPE_NAME (a); |
| 15292 | b = TYPE_NAME (b); |
| 15293 | } |
| 15294 | |
| 15295 | if (a == b) |
| 15296 | /* This can happen with friend specializations. Just order by |
| 15297 | entry address. See note in depset_cmp. */ |
| 15298 | return ea < eb ? -1 : +1; |
| 15299 | |
| 15300 | return DECL_UID (a) < DECL_UID (b) ? -1 : +1; |
| 15301 | } |
| 15302 | |
/* We add all kinds of specializations.  Implicit specializations
   should only be streamed and walked if they are reachable from
   elsewhere.  Hence the UNREACHED flag.  This is making the
   assumption that it is cheaper to reinstantiate them on demand
   elsewhere, rather than stream them in when we instantiate their
   general template.  Also, if we do stream them, we can only do that
   if they are not internal (which they can become if they themselves
   touch an internal entity?).

   DECL_P indicates whether to walk the decl table (true) or the type
   table (false).  */

void
depset::hash::add_specializations (bool decl_p)
{
  vec<spec_entry *> data;
  data.create (nelems: 100);
  walk_specializations (decl_p, specialization_add, &data);
  data.qsort (specialization_cmp);
  /* Pop entries off the end; only stability of the order matters,
     not its direction.  */
  while (data.length ())
    {
      spec_entry *entry = data.pop ();
      tree spec = entry->spec;
      int use_tpl = 0;
      bool is_friend = false;

      if (decl_p && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (entry->tmpl))
	/* A friend of a template.  This is keyed to the
	   instantiation.  */
	is_friend = true;

      if (decl_p)
	{
	  if (tree ti = DECL_TEMPLATE_INFO (spec))
	    {
	      tree tmpl = TI_TEMPLATE (ti);

	      use_tpl = DECL_USE_TEMPLATE (spec);
	      if (spec == DECL_TEMPLATE_RESULT (tmpl))
		{
		  /* Prefer the TEMPLATE_DECL as the key.  */
		  spec = tmpl;
		  gcc_checking_assert (DECL_USE_TEMPLATE (spec) == use_tpl);
		}
	      else if (is_friend)
		{
		  /* NOTE(review): comparing TI_ARGS (ti) against
		     entry->tmpl (a template, not an argument vector)
		     looks suspicious -- confirm this shouldn't be
		     entry->args.  */
		  if (TI_TEMPLATE (ti) != entry->tmpl
		      || !template_args_equal (TI_ARGS (ti), entry->tmpl))
		    goto template_friend;
		}
	    }
	  else
	    {
	      /* Entered either by falling through with no
		 template-info, or via the goto above when the friend
		 doesn't match its instantiation.  */
	      template_friend:;
	      gcc_checking_assert (is_friend);
	      /* This is a friend of a template class, but not the one
		 that generated entry->spec itself (i.e. it's an
		 equivalent clone).  We do not need to record
		 this.  */
	      continue;
	    }
	}
      else
	{
	  /* Type table: determine USE_TPL from the type (or, for an
	     enum, from its context).  */
	  if (TREE_CODE (spec) == ENUMERAL_TYPE)
	    {
	      tree ctx = DECL_CONTEXT (TYPE_NAME (spec));

	      if (TYPE_P (ctx))
		use_tpl = CLASSTYPE_USE_TEMPLATE (ctx);
	      else
		use_tpl = DECL_USE_TEMPLATE (ctx);
	    }
	  else
	    use_tpl = CLASSTYPE_USE_TEMPLATE (spec);

	  tree ti = TYPE_TEMPLATE_INFO (spec);
	  tree tmpl = TI_TEMPLATE (ti);

	  spec = TYPE_NAME (spec);
	  if (spec == DECL_TEMPLATE_RESULT (tmpl))
	    {
	      spec = tmpl;
	      use_tpl = DECL_USE_TEMPLATE (spec);
	    }
	}

      bool needs_reaching = false;
      if (use_tpl == 1)
	/* Implicit instantiations only walked if we reach them.  */
	needs_reaching = true;
      else if (!DECL_LANG_SPECIFIC (STRIP_TEMPLATE (spec))
	       || !DECL_MODULE_PURVIEW_P (STRIP_TEMPLATE (spec)))
	/* Likewise, GMF explicit or partial specializations.  */
	needs_reaching = true;

#if false && CHECKING_P
      /* The instantiation isn't always on
	 DECL_TEMPLATE_INSTANTIATIONS, */
      // FIXME: we probably need to remember this information?
      /* Verify the specialization is on the
	 DECL_TEMPLATE_INSTANTIATIONS of the template.  */
      for (tree cons = DECL_TEMPLATE_INSTANTIATIONS (entry->tmpl);
	   cons; cons = TREE_CHAIN (cons))
	if (TREE_VALUE (cons) == entry->spec)
	  {
	    gcc_assert (entry->args == TREE_PURPOSE (cons));
	    goto have_spec;
	  }
      gcc_unreachable ();
    have_spec:;
#endif

      /* Make sure nobody left a tree visited lying about.  */
      gcc_checking_assert (!TREE_VISITED (spec));
      depset *dep = make_dependency (decl: spec, ek: depset::EK_SPECIALIZATION);
      if (dep->is_special ())
	gcc_unreachable ();
      else
	{
	  if (dep->get_entity_kind () == depset::EK_REDIRECT)
	    dep = dep->deps[0];
	  else if (dep->get_entity_kind () == depset::EK_SPECIALIZATION)
	    {
	      /* Stash the spec_entry as the (special) first dep.  */
	      dep->set_special ();
	      dep->deps.safe_push (obj: reinterpret_cast<depset *> (entry));
	      if (!decl_p)
		dep->set_flag_bit<DB_TYPE_SPEC_BIT> ();
	    }

	  if (needs_reaching)
	    dep->set_flag_bit<DB_UNREACHED_BIT> ();
	  if (is_friend)
	    dep->set_flag_bit<DB_FRIEND_SPEC_BIT> ();
	}
    }
  data.release ();
}
| 15437 | |
| 15438 | /* Add a depset into the mergeable hash. */ |
| 15439 | |
| 15440 | void |
| 15441 | depset::hash::add_mergeable (depset *mergeable) |
| 15442 | { |
| 15443 | gcc_checking_assert (is_key_order ()); |
| 15444 | entity_kind ek = mergeable->get_entity_kind (); |
| 15445 | tree decl = mergeable->get_entity (); |
| 15446 | gcc_checking_assert (ek < EK_DIRECT_HWM); |
| 15447 | |
| 15448 | depset **slot = entity_slot (entity: decl, insert: true); |
| 15449 | gcc_checking_assert (!*slot); |
| 15450 | depset *dep = make_entity (entity: decl, ek); |
| 15451 | *slot = dep; |
| 15452 | |
| 15453 | worklist.safe_push (obj: dep); |
| 15454 | |
| 15455 | /* So we can locate the mergeable depset this depset refers to, |
| 15456 | mark the first dep. */ |
| 15457 | dep->set_special (); |
| 15458 | dep->deps.safe_push (obj: mergeable); |
| 15459 | } |
| 15460 | |
| 15461 | /* Find the innermost-namespace scope of DECL, and that |
| 15462 | namespace-scope decl. */ |
| 15463 | |
| 15464 | tree |
| 15465 | find_pending_key (tree decl, tree *decl_p = nullptr) |
| 15466 | { |
| 15467 | tree ns = decl; |
| 15468 | do |
| 15469 | { |
| 15470 | decl = ns; |
| 15471 | ns = CP_DECL_CONTEXT (ns); |
| 15472 | if (TYPE_P (ns)) |
| 15473 | ns = TYPE_NAME (ns); |
| 15474 | } |
| 15475 | while (TREE_CODE (ns) != NAMESPACE_DECL); |
| 15476 | |
| 15477 | if (decl_p) |
| 15478 | *decl_p = decl; |
| 15479 | |
| 15480 | return ns; |
| 15481 | } |
| 15482 | |
| 15483 | /* Creates bindings and dependencies for all deduction guides of |
| 15484 | the given class template DECL as needed. */ |
| 15485 | |
void
depset::hash::add_deduction_guides (tree decl)
{
  /* Alias templates never have deduction guides.  */
  if (DECL_ALIAS_TEMPLATE_P (decl))
    return;

  /* We don't need to do anything for class-scope deduction guides,
     as they will be added as members anyway.  */
  if (!DECL_NAMESPACE_SCOPE_P (decl))
    return;

  /* Guides live in DECL's namespace under the special dguide name.  */
  tree ns = CP_DECL_CONTEXT (decl);
  tree name = dguide_name (decl);

  /* We always add all deduction guides with a given name at once,
     so if there's already a binding there's nothing to do.  */
  if (find_binding (ctx: ns, name))
    return;

  /* Lookup may fail (no guides declared); nothing to bind then.  */
  tree guides = lookup_qualified_name (scope: ns, name, LOOK_want::NORMAL,
				       /*complain=*/false);
  if (guides == error_mark_node)
    return;

  /* The binding depset is created lazily, on the first non-imported
     guide we encounter.  */
  depset *binding = nullptr;
  for (tree t : lkp_range (guides))
    {
      gcc_checking_assert (!TREE_VISITED (t));
      depset *dep = make_dependency (decl: t, ek: EK_FOR_BINDING);

      /* We don't want to create bindings for imported deduction guides, as
	 this would potentially cause name lookup to return duplicates.  */
      if (dep->is_import ())
	continue;

      if (!binding)
	{
	  /* We have bindings to add.  */
	  binding = make_binding (ns, name);
	  add_namespace_context (dep: binding, ns);

	  depset **slot = binding_slot (ctx: ns, name, /*insert=*/true);
	  *slot = binding;
	}

      /* Link the guide and the binding both ways.  */
      binding->deps.safe_push (obj: dep);
      dep->deps.safe_push (obj: binding);
      /* NOTE(review): this dumps DECL (the class template), not the
	 guide T itself -- presumably intentional, but worth confirming.  */
      dump (dumper::DEPEND)
	&& dump ("Built binding for deduction guide %C:%N" ,
		 TREE_CODE (decl), decl);
    }
}
| 15539 | |
| 15540 | /* Iteratively find dependencies. During the walk we may find more |
| 15541 | entries on the same binding that need walking. */ |
| 15542 | |
void
depset::hash::find_dependencies (module_state *module)
{
  /* The walker streams each entity (to nowhere) purely to discover
     what it depends on; discoveries land back in this hash.  */
  trees_out walker (NULL, module, *this);
  vec<depset *> unreached;
  unreached.create (nelems: worklist.length ());

  /* Outer loop: keep going while walking reaches previously-unreached
     depsets, which get requeued for another pass.  */
  for (;;)
    {
      reached_unreached = false;
      while (worklist.length ())
	{
	  depset *item = worklist.pop ();

	  gcc_checking_assert (!item->is_binding ());
	  if (item->is_unreached ())
	    /* Park it; it may become reached by a later walk.  */
	    unreached.quick_push (obj: item);
	  else
	    {
	      current = item;
	      tree decl = current->get_entity ();
	      dump (is_key_order () ? dumper::MERGE : dumper::DEPEND)
		&& dump ("Dependencies of %s %C:%N" ,
			 is_key_order () ? "key-order"
			 : current->entity_kind_name (), TREE_CODE (decl), decl);
	      dump.indent ();
	      walker.begin ();
	      /* Dispatch on the kind of entity being walked.  */
	      if (current->get_entity_kind () == EK_USING)
		walker.tree_node (OVL_FUNCTION (decl));
	      else if (current->get_entity_kind () == EK_TU_LOCAL)
		/* We only stream its name and location.  */
		module->note_location (DECL_SOURCE_LOCATION (decl));
	      else if (TREE_VISITED (decl))
		/* A global tree.  */;
	      else if (current->get_entity_kind () == EK_NAMESPACE)
		{
		  module->note_location (DECL_SOURCE_LOCATION (decl));
		  add_namespace_context (dep: current, CP_DECL_CONTEXT (decl));
		}
	      else
		{
		  /* A regular decl: walk its declaration (and
		     definition, if we're emitting one).  */
		  walker.mark_declaration (decl, do_defn: current->has_defn ());

		  if (!is_key_order ()
		      && item->is_pending_entity ())
		    {
		      /* Pending entities are keyed by their innermost
			 enclosing namespace.  */
		      tree ns = find_pending_key (decl, decl_p: nullptr);
		      add_namespace_context (dep: item, ns);
		    }

		  auto ovr = make_temp_override
		    (var&: ignore_exposure, overrider: item->is_ignored_exposure_context ());
		  walker.decl_value (decl, dep: current);
		  if (current->has_defn ())
		    walker.write_definition (decl, refs_tu_local: current->refs_tu_local ());
		}
	      walker.end ();

	      /* If we see either a class template or a deduction guide, make
		 sure to add all visible deduction guides.  We need to check
		 both in case they have been added in separate modules, or
		 one is in the GMF and would have otherwise been discarded.  */
	      if (!is_key_order ()
		  && DECL_CLASS_TEMPLATE_P (decl))
		add_deduction_guides (decl);
	      if (!is_key_order ()
		  && deduction_guide_p (decl))
		add_deduction_guides (TYPE_NAME (TREE_TYPE (TREE_TYPE (decl))));

	      /* Handle dependent ADL for [module.global.frag] p3.3.  */
	      if (!is_key_order () && !dep_adl_entity_list.is_empty ())
		{
		  /* Perform the (tentative) lookups as if in a template,
		     so nothing is instantiated as a side effect.  */
		  processing_template_decl_sentinel ptds;
		  ++processing_template_decl;
		  for (auto &info : dep_adl_entity_list)
		    {
		      tree lookup = lookup_arg_dependent (info.name, NULL_TREE,
							  info.args, tentative: true);
		      for (tree fn : lkp_range (lookup))
			add_dependency (dep: make_dependency (decl: fn, ek: EK_DECL));

		      if (info.rewrite)
			{
			  /* Also look up the rewritten operator (e.g. ==
			     for !=) as candidates.  */
			  tree rewrite_name = ovl_op_identifier (code: info.rewrite);
			  lookup = lookup_arg_dependent (rewrite_name, NULL_TREE,
							 info.args, tentative: true);
			  for (tree fn : lkp_range (lookup))
			    add_dependency (dep: make_dependency (decl: fn, ek: EK_DECL));
			}
		      release_tree_vector (info.args);
		    }
		  dep_adl_entity_list.truncate (size: 0);
		}

	      if (!is_key_order ()
		  && TREE_CODE (decl) == TEMPLATE_DECL
		  && !DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
		{
		  /* Mark all the explicit & partial specializations as
		     reachable.  We search both specialization lists as some
		     constrained partial specializations for class types are
		     only found in DECL_TEMPLATE_SPECIALIZATIONS.  */
		  auto mark_reached = [this](tree spec)
		    {
		      if (TYPE_P (spec))
			spec = TYPE_NAME (spec);
		      int use_tpl;
		      node_template_info (decl: spec, use&: use_tpl);
		      /* Bit 2 indicates an explicit/partial
			 specialization (not an implicit one).  */
		      if (use_tpl & 2)
			{
			  depset *spec_dep = find_dependency (decl: spec);
			  if (spec_dep->get_entity_kind () == EK_REDIRECT)
			    spec_dep = spec_dep->deps[0];
			  if (spec_dep->is_unreached ())
			    {
			      /* Trigger another pass of the outer loop.  */
			      reached_unreached = true;
			      spec_dep->clear_flag_bit<DB_UNREACHED_BIT> ();
			      dump (dumper::DEPEND)
				&& dump ("Reaching unreached specialization"
					 " %C:%N" , TREE_CODE (spec), spec);
			    }
			}
		    };

		  for (tree cons = DECL_TEMPLATE_INSTANTIATIONS (decl);
		       cons; cons = TREE_CHAIN (cons))
		    mark_reached (TREE_VALUE (cons));
		  for (tree cons = DECL_TEMPLATE_SPECIALIZATIONS (decl);
		       cons; cons = TREE_CHAIN (cons))
		    mark_reached (TREE_VALUE (cons));
		}

	      dump.outdent ();
	      current = NULL;
	    }
	}

      if (!reached_unreached)
	break;

      /* It's possible the we reached the unreached before we
	 processed it in the above loop, so we'll be doing this an
	 extra time.  However, to avoid that we have to do some
	 bit shuffling that also involves a scan of the list.
	 Swings & roundabouts I guess.  */
      std::swap (a&: worklist, b&: unreached);
    }

  unreached.release ();
}
| 15693 | |
| 15694 | /* Compare two entries of a single binding. TYPE_DECL before |
| 15695 | non-exported before exported. */ |
| 15696 | |
static int
binding_cmp (const void *a_, const void *b_)
{
  depset *a = *(depset *const *)a_;
  depset *b = *(depset *const *)b_;

  /* We only compare distinct, non-binding entries of one binding.  */
  tree a_ent = a->get_entity ();
  tree b_ent = b->get_entity ();
  gcc_checking_assert (a_ent != b_ent
		       && !a->is_binding ()
		       && !b->is_binding ());

  /* Implicit typedefs come first.  */
  bool a_implicit = DECL_IMPLICIT_TYPEDEF_P (a_ent);
  bool b_implicit = DECL_IMPLICIT_TYPEDEF_P (b_ent);
  if (a_implicit || b_implicit)
    {
      /* A binding with two implicit type decls? That's unpossible! */
      gcc_checking_assert (!(a_implicit && b_implicit));
      return a_implicit ? -1 : +1;  /* Implicit first.  */
    }

  /* TU-local before non-TU-local.  */
  bool a_internal = a->get_entity_kind () == depset::EK_TU_LOCAL;
  bool b_internal = b->get_entity_kind () == depset::EK_TU_LOCAL;
  if (a_internal != b_internal)
    return a_internal ? -1 : +1;  /* Internal first.  */

  /* Hidden before non-hidden.  */
  bool a_hidden = a->is_hidden ();
  bool b_hidden = b->is_hidden ();
  if (a_hidden != b_hidden)
    return a_hidden ? -1 : +1;

  /* Compute A's exportedness.  A using decl carries its own export
     flag on the overload; an enumerator (CONST_DECL) may carry it
     directly, otherwise we consult its enclosing enum's TYPE_NAME.  */
  bool a_using = a->get_entity_kind () == depset::EK_USING;
  bool a_export;
  if (a_using)
    {
      a_export = OVL_EXPORT_P (a_ent);
      a_ent = OVL_FUNCTION (a_ent);
    }
  else if (TREE_CODE (a_ent) == CONST_DECL
	   && DECL_LANG_SPECIFIC (a_ent)
	   && DECL_MODULE_EXPORT_P (a_ent))
    a_export = true;
  else
    a_export = DECL_MODULE_EXPORT_P (TREE_CODE (a_ent) == CONST_DECL
				     ? TYPE_NAME (TREE_TYPE (a_ent))
				     : STRIP_TEMPLATE (a_ent));

  /* Likewise for B.  */
  bool b_using = b->get_entity_kind () == depset::EK_USING;
  bool b_export;
  if (b_using)
    {
      b_export = OVL_EXPORT_P (b_ent);
      b_ent = OVL_FUNCTION (b_ent);
    }
  else if (TREE_CODE (b_ent) == CONST_DECL
	   && DECL_LANG_SPECIFIC (b_ent)
	   && DECL_MODULE_EXPORT_P (b_ent))
    b_export = true;
  else
    b_export = DECL_MODULE_EXPORT_P (TREE_CODE (b_ent) == CONST_DECL
				     ? TYPE_NAME (TREE_TYPE (b_ent))
				     : STRIP_TEMPLATE (b_ent));

  /* Non-exports before exports.  */
  if (a_export != b_export)
    return a_export ? +1 : -1;

  /* At this point we don't care, but want a stable sort.  */

  if (a_using != b_using)
    /* using first.  */
    return a_using? -1 : +1;

  /* Final tie-break: decl UID (stable within a compilation).  */
  return DECL_UID (a_ent) < DECL_UID (b_ent) ? -1 : +1;
}
| 15775 | |
| 15776 | /* True iff TMPL has an explicit instantiation definition. |
| 15777 | |
| 15778 | This is local to module.cc because register_specialization skips adding most |
| 15779 | instantiations unless module_maybe_has_cmi_p. */ |
| 15780 | |
| 15781 | static bool |
| 15782 | template_has_explicit_inst (tree tmpl) |
| 15783 | { |
| 15784 | for (tree t = DECL_TEMPLATE_INSTANTIATIONS (tmpl); t; t = TREE_CHAIN (t)) |
| 15785 | { |
| 15786 | tree spec = TREE_VALUE (t); |
| 15787 | if (DECL_EXPLICIT_INSTANTIATION (spec) |
| 15788 | && !DECL_REALLY_EXTERN (spec)) |
| 15789 | return true; |
| 15790 | } |
| 15791 | return false; |
| 15792 | } |
| 15793 | |
| 15794 | /* Complain about DEP that exposes a TU-local entity. |
| 15795 | |
| 15796 | If STRICT, DEP only referenced entities from the GMF. Returns TRUE |
| 15797 | if we explained anything. */ |
| 15798 | |
bool
depset::hash::diagnose_bad_internal_ref (depset *dep, bool strict)
{
  tree decl = dep->get_entity ();

  /* Don't need to walk if we're not going to be emitting
     any diagnostics anyway.  */
  if (strict && !warning_enabled_at (DECL_SOURCE_LOCATION (decl),
				     opt_id: OPT_Wexpose_global_module_tu_local))
    return false;

  /* Scan DEP's deps for a TU-local entity it exposes; the first one
     found produces the diagnostic and ends the walk.  */
  for (depset *rdep : dep->deps)
    if (!rdep->is_binding () && rdep->is_tu_local (strict)
	&& !is_exposure_of_member_type (source: dep, ref: rdep))
      {
	// FIXME:QOI Better location information?  We're
	// losing, so it doesn't matter about efficiency.
	tree exposed = rdep->get_entity ();
	auto_diagnostic_group d;
	if (strict)
	  {
	    /* Allow suppressing the warning from the point of declaration
	       of the otherwise-exposed decl, for cases we know that
	       exposures will never be 'bad'.  */
	    if (warning_enabled_at (DECL_SOURCE_LOCATION (exposed),
				    opt_id: OPT_Wexpose_global_module_tu_local)
		&& pedwarn (DECL_SOURCE_LOCATION (decl),
			    OPT_Wexpose_global_module_tu_local,
			    "%qD exposes TU-local entity %qD" , decl, exposed))
	      {
		/* Explain why EXPOSED is TU-local.  */
		bool informed = is_tu_local_entity (decl: exposed, /*explain=*/true);
		gcc_checking_assert (informed);
		return true;
	      }
	  }
	else
	  {
	    /* Non-strict exposure is a hard error.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "%qD exposes TU-local entity %qD" , decl, exposed);
	    bool informed = is_tu_local_entity (decl: exposed, /*explain=*/true);
	    gcc_checking_assert (informed);
	    if (dep->is_tu_local (/*strict=*/true))
	      inform (DECL_SOURCE_LOCATION (decl),
		      "%qD is also TU-local but has been exposed elsewhere" ,
		      decl);
	    return true;
	  }
      }

  /* Nothing diagnosed (or the warning was suppressed).  */
  return false;
}
| 15850 | |
| 15851 | /* Warn about a template DEP that references a TU-local entity. |
| 15852 | |
| 15853 | If STRICT, DEP only referenced entities from the GMF. Returns TRUE |
| 15854 | if we explained anything. */ |
| 15855 | |
bool
depset::hash::diagnose_template_names_tu_local (depset *dep, bool strict)
{
  tree decl = dep->get_entity ();

  /* Don't bother walking if we know we won't be emitting anything.  */
  if (!warning_enabled_at (DECL_SOURCE_LOCATION (decl),
			   opt_id: OPT_Wtemplate_names_tu_local)
      /* Only warn strictly if users haven't silenced this warning here.  */
      || (strict && !warning_enabled_at (DECL_SOURCE_LOCATION (decl),
					 opt_id: OPT_Wexpose_global_module_tu_local)))
    return false;

  /* Friend decls in a class body are ignored, but this is harmless:
     it should not impact any consumers.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* We should now only be warning about templates.  */
  gcc_checking_assert
    (TREE_CODE (decl) == TEMPLATE_DECL
     && VAR_OR_FUNCTION_DECL_P (DECL_TEMPLATE_RESULT (decl)));

  /* Don't warn if we've seen any explicit instantiation definitions,
     the intent might be for importers to only use those.  */
  if (template_has_explicit_inst (tmpl: decl))
    return false;

  /* Find the first TU-local reference and warn about it; stop at the
     first diagnostic we successfully emit.  */
  for (depset *rdep : dep->deps)
    if (!rdep->is_binding () && rdep->is_tu_local (strict))
      {
	tree ref = rdep->get_entity ();
	auto_diagnostic_group d;
	if (strict)
	  {
	    /* Strict (GMF-only) references get a softer "may cause
	       issues" wording, suppressible at REF's declaration.  */
	    if (warning_enabled_at (DECL_SOURCE_LOCATION (ref),
				    opt_id: OPT_Wexpose_global_module_tu_local)
		&& warning_at (DECL_SOURCE_LOCATION (decl),
			       OPT_Wtemplate_names_tu_local,
			       "%qD refers to TU-local entity %qD, which may "
			       "cause issues when instantiating in other TUs" ,
			       decl, ref))
	      {
		is_tu_local_entity (decl: ref, /*explain=*/true);
		return true;
	      }
	  }
	else if (warning_at (DECL_SOURCE_LOCATION (decl),
			     OPT_Wtemplate_names_tu_local,
			     "%qD refers to TU-local entity %qD and cannot "
			     "be instantiated in other TUs" , decl, ref))
	  {
	    is_tu_local_entity (decl: ref, /*explain=*/true);
	    return true;
	  }
      }

  return false;
}
| 15915 | |
| 15916 | /* Sort the bindings, issue errors about bad internal refs. */ |
| 15917 | |
bool
depset::hash::finalize_dependencies ()
{
  /* Returns false if any exposure errors were diagnosed.  */
  bool ok = true;
  for (depset *dep : *this)
    {
      if (dep->is_binding ())
	{
	  /* Keep the containing namespace dep first.  */
	  gcc_checking_assert (dep->deps.length () > 1
			       && (dep->deps[0]->get_entity_kind ()
				   == EK_NAMESPACE)
			       && (dep->deps[0]->get_entity ()
				   == dep->get_entity ()));
	  /* Sort the remaining entries into a canonical order.  */
	  if (dep->deps.length () > 2)
	    gcc_qsort (&dep->deps[1], dep->deps.length () - 1,
		       sizeof (dep->deps[1]), binding_cmp);

	  /* Bindings shouldn't refer to imported entities.  */
	  if (CHECKING_P)
	    for (depset *entity : dep->deps)
	      gcc_checking_assert (!entity->is_import ());
	  continue;
	}

      /* Otherwise, we'll check for bad internal refs.
	 Don't complain about any references from TU-local entities.  */
      if (dep->is_tu_local ())
	continue;

      if (dep->is_exposure ())
	{
	  bool explained = diagnose_bad_internal_ref (dep);

	  /* A TU-local variable will always be considered an exposure,
	     so we don't have to worry about strict-only handling.  */
	  tree decl = dep->get_entity ();
	  if (!explained
	      && VAR_P (decl)
	      && (DECL_DECLARED_CONSTEXPR_P (decl)
		  || DECL_INLINE_VAR_P (decl)))
	    {
	      /* A constexpr or inline variable whose initializer is
		 TU-local: diagnose via the initializer instead.  */
	      auto_diagnostic_group d;
	      if (DECL_DECLARED_CONSTEXPR_P (decl))
		error_at (DECL_SOURCE_LOCATION (decl),
			  "%qD is declared %<constexpr%> and is initialized to "
			  "a TU-local value" , decl);
	      else
		{
		  /* This can only occur with references.  */
		  gcc_checking_assert (TYPE_REF_P (TREE_TYPE (decl)));
		  error_at (DECL_SOURCE_LOCATION (decl),
			    "%qD is a reference declared %<inline%> and is "
			    "constant-initialized to a TU-local value" , decl);
		}
	      bool informed = is_tu_local_value (decl, DECL_INITIAL (decl),
						 /*explain=*/true);
	      gcc_checking_assert (informed);
	      explained = true;
	    }

	  /* We should have emitted an error above, unless the warning was
	     silenced.  */
	  gcc_checking_assert (explained);
	  ok = false;
	  continue;
	}

      /* In all other cases, we're just warning (rather than erroring).
	 We don't want to do too much warning, so let's just bail after
	 the first warning we successfully emit.  */
      if (warn_expose_global_module_tu_local
	  && !dep->is_tu_local (/*strict=*/true)
	  && dep->is_exposure (/*strict=*/true)
	  && diagnose_bad_internal_ref (dep, /*strict=*/true))
	continue;

      if (warn_template_names_tu_local
	  && dep->refs_tu_local ()
	  && diagnose_template_names_tu_local (dep))
	continue;

      if (warn_template_names_tu_local
	  && warn_expose_global_module_tu_local
	  && !dep->is_tu_local (/*strict=*/true)
	  && dep->refs_tu_local (/*strict=*/true)
	  && !dep->is_exposure (/*strict=*/true)
	  && diagnose_template_names_tu_local (dep, /*strict=*/true))
	continue;
    }

  return ok;
}
| 16011 | |
| 16012 | /* Core of TARJAN's algorithm to find Strongly Connected Components |
| 16013 | within a graph. See https://en.wikipedia.org/wiki/ |
| 16014 | Tarjan%27s_strongly_connected_components_algorithm for details. |
| 16015 | |
| 16016 | We use depset::section as lowlink. Completed nodes have |
| 16017 | depset::cluster containing the cluster number, with the top |
| 16018 | bit set. |
| 16019 | |
| 16020 | A useful property is that the output vector is a reverse |
| 16021 | topological sort of the resulting DAG. In our case that means |
| 16022 | dependent SCCs are found before their dependers. We make use of |
| 16023 | that property. */ |
| 16024 | |
void
depset::tarjan::connect (depset *v)
{
  /* Only bindings, or entities that are neither TU-local, unreached
     nor imported, participate in the SCC computation.  */
  gcc_checking_assert (v->is_binding ()
		       || !(v->is_tu_local ()
			    || v->is_unreached ()
			    || v->is_import ()));

  /* Assign V the next DFS index; section doubles as Tarjan's lowlink.  */
  v->cluster = v->section = ++index;
  stack.safe_push (obj: v);

  /* Walk all our dependencies, ignore a first marked slot  */
  for (unsigned ix = v->is_special (); ix != v->deps.length (); ix++)
    {
      depset *dep = v->deps[ix];

      if (dep->is_binding ()
	  || !(dep->is_import () || dep->is_tu_local ()))
	{
	  unsigned lwm = dep->cluster;

	  if (!dep->cluster)
	    {
	      /* A new node.  Connect it.  */
	      connect (v: dep);
	      lwm = dep->section;
	    }

	  /* DEP->section is zeroed once its SCC completes, so only
	     still-on-stack deps can lower V's lowlink.  */
	  if (dep->section && v->section > lwm)
	    v->section = lwm;
	}
    }

  if (v->section == v->cluster)
    {
      /* Root of a new SCC.  Push all the members onto the result list. */
      unsigned num = v->cluster;
      depset *p;
      do
	{
	  /* Pop stack members down to (and including) V, stamping
	     them all with this SCC's cluster number.  */
	  p = stack.pop ();
	  p->cluster = num;
	  p->section = 0;
	  result.quick_push (obj: p);
	}
      while (p != v);
    }
}
| 16073 | |
| 16074 | /* Compare two depsets. The specific ordering is unimportant, we're |
| 16075 | just trying to get consistency. */ |
| 16076 | |
| 16077 | static int |
| 16078 | depset_cmp (const void *a_, const void *b_) |
| 16079 | { |
| 16080 | depset *a = *(depset *const *)a_; |
| 16081 | depset *b = *(depset *const *)b_; |
| 16082 | |
| 16083 | depset::entity_kind a_kind = a->get_entity_kind (); |
| 16084 | depset::entity_kind b_kind = b->get_entity_kind (); |
| 16085 | |
| 16086 | if (a_kind != b_kind) |
| 16087 | /* Different entity kinds, order by that. */ |
| 16088 | return a_kind < b_kind ? -1 : +1; |
| 16089 | |
| 16090 | tree a_decl = a->get_entity (); |
| 16091 | tree b_decl = b->get_entity (); |
| 16092 | if (a_kind == depset::EK_USING) |
| 16093 | { |
| 16094 | /* If one is a using, the other must be too. */ |
| 16095 | a_decl = OVL_FUNCTION (a_decl); |
| 16096 | b_decl = OVL_FUNCTION (b_decl); |
| 16097 | } |
| 16098 | |
| 16099 | if (a_decl != b_decl) |
| 16100 | /* Different entities, order by their UID. */ |
| 16101 | return DECL_UID (a_decl) < DECL_UID (b_decl) ? -1 : +1; |
| 16102 | |
| 16103 | if (a_kind == depset::EK_BINDING) |
| 16104 | { |
| 16105 | /* Both are bindings. Order by identifier hash. */ |
| 16106 | gcc_checking_assert (a->get_name () != b->get_name ()); |
| 16107 | hashval_t ah = IDENTIFIER_HASH_VALUE (a->get_name ()); |
| 16108 | hashval_t bh = IDENTIFIER_HASH_VALUE (b->get_name ()); |
| 16109 | return (ah == bh ? 0 : ah < bh ? -1 : +1); |
| 16110 | } |
| 16111 | |
| 16112 | /* They are the same decl. This can happen with two using decls |
| 16113 | pointing to the same target. The best we can aim for is |
| 16114 | consistently telling qsort how to order them. Hopefully we'll |
| 16115 | never have to debug a case that depends on this. Oh, who am I |
| 16116 | kidding? Good luck. */ |
| 16117 | gcc_checking_assert (a_kind == depset::EK_USING); |
| 16118 | |
| 16119 | /* Order by depset address. Not the best, but it is something. */ |
| 16120 | return a < b ? -1 : +1; |
| 16121 | } |
| 16122 | |
| 16123 | /* Sort the clusters in SCC such that those that depend on one another |
| 16124 | are placed later. */ |
| 16125 | |
| 16126 | // FIXME: I am not convinced this is needed and, if needed, |
| 16127 | // sufficient. We emit the decls in this order but that emission |
| 16128 | // could walk into later decls (from the body of the decl, or default |
| 16129 | // arg-like things). Why doesn't that walk do the right thing? And |
| 16130 | // if it DTRT why do we need to sort here -- won't things naturally |
| 16131 | // work? I think part of the issue is that when we're going to refer |
| 16132 | // to an entity by name, and that entity is in the same cluster as us, |
| 16133 | // we need to actually walk that entity, if we've not already walked |
| 16134 | // it. |
static void
sort_cluster (depset::hash *original, depset *scc[], unsigned size)
{
  /* A scratch hash used purely to compute a key-order walk over the
     mergeable members of this SCC.  */
  depset::hash table (size, original);

  dump.indent ();

  /* Place bindings last, usings before that.  It's not strictly
     necessary, but it does make things neater.  Says Mr OCD.  */
  unsigned bind_lwm = size;
  unsigned use_lwm = size;
  /* Partition SCC in place: [0,use_lwm) mergeables, [use_lwm,bind_lwm)
     usings/TU-locals, [bind_lwm,size) bindings.  IX only advances when
     a mergeable stays put.  */
  for (unsigned ix = 0; ix != use_lwm;)
    {
      depset *dep = scc[ix];
      switch (dep->get_entity_kind ())
	{
	case depset::EK_BINDING:
	  /* Move to end.  No increment.  Notice this could be moving
	     a using decl, which we'll then move again.  */
	  if (--bind_lwm != ix)
	    {
	      scc[ix] = scc[bind_lwm];
	      scc[bind_lwm] = dep;
	    }
	  if (use_lwm > bind_lwm)
	    {
	      use_lwm--;
	      break;
	    }
	  /* We must have copied a using or TU-local, so move it too.  */
	  dep = scc[ix];
	  gcc_checking_assert
	    (dep->get_entity_kind () == depset::EK_USING
	     || dep->get_entity_kind () == depset::EK_TU_LOCAL);
	  /* FALLTHROUGH */

	case depset::EK_USING:
	case depset::EK_TU_LOCAL:
	  /* Move just before the bindings.  No increment.  */
	  if (--use_lwm != ix)
	    {
	      scc[ix] = scc[use_lwm];
	      scc[use_lwm] = dep;
	    }
	  break;

	case depset::EK_DECL:
	case depset::EK_SPECIALIZATION:
	case depset::EK_PARTIAL:
	  /* A mergeable entity: feed it to the key-order hash.  */
	  table.add_mergeable (mergeable: dep);
	  ix++;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (use_lwm <= bind_lwm);
  dump (dumper::MERGE) && dump ("Ordering %u/%u depsets" , use_lwm, size);

  /* Key-order walk (no module) to determine merge-key dependencies.  */
  table.find_dependencies (module: nullptr);

  auto_vec<depset *> order = table.connect ();
  gcc_checking_assert (order.length () == use_lwm);

  /* Now rewrite entries [0,lwm), in the dependency order we
     discovered.  Usually each entity is in its own cluster.  Rarely,
     we can get multi-entity clusters, in which case all but one must
     only be reached from within the cluster.  This happens for
     something like:

     template<typename T>
     auto Foo (const T &arg) -> TPL<decltype (arg)>;

     The instantiation of TPL will be in the specialization table, and
     refer to Foo via arg.  But we can only get to that specialization
     from Foo's declaration, so we only need to treat Foo as mergable
     (We'll do structural comparison of TPL<decltype (arg)>).

     We approximate finding the single cluster entry dep by checking for
     entities recursively depending on a dep first seen when streaming
     its own merge key; the first dep we see in such a cluster should be
     the first one streamed.  */
  unsigned entry_pos = ~0u;
  unsigned cluster = ~0u;
  for (unsigned ix = 0; ix != order.length (); ix++)
    {
      /* Each scratch depset was created by add_mergeable, so deps[0]
	 is the original depset it stands for.  */
      gcc_checking_assert (order[ix]->is_special ());
      bool tight = order[ix]->cluster == cluster;
      depset *dep = order[ix]->deps[0];
      dump (dumper::MERGE)
	&& dump ("Mergeable %u is %N%s%s" , ix, dep->get_entity (),
		 tight ? " (tight)" : "" , dep->is_entry () ? " (entry)" : "" );
      scc[ix] = dep;
      if (tight)
	{
	  /* Same cluster as the previous entry: keep the cluster's
	     entry dep at ENTRY_POS by swapping if needed.  */
	  gcc_checking_assert (dep->is_maybe_recursive ());
	  if (dep->is_entry ())
	    {
	      /* There should only be one entry dep in a cluster.  */
	      gcc_checking_assert (!scc[entry_pos]->is_entry ());
	      gcc_checking_assert (scc[entry_pos]->is_maybe_recursive ());
	      scc[ix] = scc[entry_pos];
	      scc[entry_pos] = dep;
	    }
	}
      else
	entry_pos = ix;
      cluster = order[ix]->cluster;
    }

  dump (dumper::MERGE) && dump ("Ordered %u keys" , order.length ());
  dump.outdent ();
}
| 16249 | |
| 16250 | /* Reduce graph to SCCS clusters. SCCS will be populated with the |
| 16251 | depsets in dependency order. Each depset's CLUSTER field contains |
| 16252 | its cluster number. Each SCC has a unique cluster number, and are |
| 16253 | contiguous in SCCS. Cluster numbers are otherwise arbitrary. */ |
| 16254 | |
vec<depset *>
depset::hash::connect ()
{
  tarjan connector (size ());
  vec<depset *> deps;
  deps.create (nelems: size ());
  /* Collect the depsets that participate: all bindings, plus entities
     that are not redirects, TU-local, unreached or imported.  */
  for (depset *item : *this)
    {
      entity_kind kind = item->get_entity_kind ();
      if (kind == EK_BINDING
	  || !(kind == EK_REDIRECT
	       || item->is_tu_local ()
	       || item->is_unreached ()
	       || item->is_import ()))
	deps.quick_push (obj: item);
    }

  /* Iteration over the hash table is an unspecified ordering.  While
     that has advantages, it causes 2 problems.  Firstly repeatable
     builds are tricky.  Secondly creating testcases that check
     dependencies are correct by making sure a bad ordering would
     happen if that was wrong.  */
  deps.qsort (depset_cmp);

  /* Run Tarjan from each not-yet-visited root (cluster is zero until
     a depset has been connected).  */
  while (deps.length ())
    {
      depset *v = deps.pop ();
      dump (dumper::CLUSTER) &&
	(v->is_binding ()
	 ? dump ("Connecting binding %P" , v->get_entity (), v->get_name ())
	 : dump ("Connecting %s %s %C:%N" ,
		 is_key_order () ? "key-order"
		 : !v->has_defn () ? "declaration" : "definition" ,
		 v->entity_kind_name (), TREE_CODE (v->get_entity ()),
		 v->get_entity ()));
      if (!v->cluster)
	connector.connect (v);
    }

  deps.release ();
  return connector.result;
}
| 16297 | |
| 16298 | /* Initialize location spans. */ |
| 16299 | |
void
loc_spans::init (const line_maps *lmaps, const line_map_ordinary *map)
{
  gcc_checking_assert (!init_p ());
  spans = new vec<span> ();
  spans->reserve (nelems: 20);

  /* Ordinary locations are allocated upwards from zero; macro
     locations downwards from MAX_LOCATION_T.  Each span records a
     half-open [first,second) range of each kind.  */
  span interval;
  interval.ordinary.first = 0;
  interval.macro.second = MAX_LOCATION_T + 1;
  interval.ordinary_delta = interval.macro_delta = 0;

  /* A span for reserved fixed locs.  It covers everything below the
     first ordinary map, and (deliberately) no macro locations.  */
  interval.ordinary.second
    = MAP_START_LOCATION (map: LINEMAPS_ORDINARY_MAP_AT (set: line_table, index: 0));
  interval.macro.first = interval.macro.second;
  dump (dumper::LOCATION)
    && dump ("Fixed span %u ordinary:[%K,%K) macro:[%K,%K)", spans->length (),
	     interval.ordinary.first, interval.ordinary.second,
	     interval.macro.first, interval.macro.second);
  spans->quick_push (obj: interval);

  /* A span for command line & forced headers.  MAP, if non-NULL,
     marks where the main file proper begins.  */
  interval.ordinary.first = interval.ordinary.second;
  interval.macro.second = interval.macro.first;
  if (map)
    {
      interval.ordinary.second = map->start_location;
      interval.macro.first = LINEMAPS_MACRO_LOWEST_LOCATION (set: lmaps);
    }
  dump (dumper::LOCATION)
    && dump ("Pre span %u ordinary:[%K,%K) macro:[%K,%K)", spans->length (),
	     interval.ordinary.first, interval.ordinary.second,
	     interval.macro.first, interval.macro.second);
  spans->quick_push (obj: interval);

  /* Start an interval for the main file.  It is left open; close ()
     fills in the missing bounds.  */
  interval.ordinary.first = interval.ordinary.second;
  interval.macro.second = interval.macro.first;
  dump (dumper::LOCATION)
    && dump ("Main span %u ordinary:[%K,*) macro:[*,%K)", spans->length (),
	     interval.ordinary.first, interval.macro.second);
  spans->quick_push (obj: interval);
}
| 16344 | |
| 16345 | /* Reopen the span, if we want the about-to-be-inserted set of maps to |
| 16346 | be propagated in our own location table. I.e. we are the primary |
| 16347 | interface and we're importing a partition. */ |
| 16348 | |
| 16349 | bool |
| 16350 | loc_spans::maybe_propagate (module_state *import, location_t hwm) |
| 16351 | { |
| 16352 | bool opened = (module_interface_p () && !module_partition_p () |
| 16353 | && import->is_partition ()); |
| 16354 | if (opened) |
| 16355 | open (hwm); |
| 16356 | return opened; |
| 16357 | } |
| 16358 | |
| 16359 | /* Open a new linemap interval. The just-created ordinary map is the |
| 16360 | first map of the interval. */ |
| 16361 | |
| 16362 | void |
| 16363 | loc_spans::open (location_t hwm) |
| 16364 | { |
| 16365 | span interval; |
| 16366 | interval.ordinary.first = interval.ordinary.second = hwm; |
| 16367 | interval.macro.first = interval.macro.second |
| 16368 | = LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table); |
| 16369 | interval.ordinary_delta = interval.macro_delta = 0; |
| 16370 | dump (dumper::LOCATION) |
| 16371 | && dump ("Opening span %u ordinary:[%K,... macro:...,%K)" , |
| 16372 | spans->length (), interval.ordinary.first, |
| 16373 | interval.macro.second); |
| 16374 | if (spans->length ()) |
| 16375 | { |
| 16376 | /* No overlapping! */ |
| 16377 | auto &last = spans->last (); |
| 16378 | gcc_checking_assert (interval.ordinary.first >= last.ordinary.second); |
| 16379 | gcc_checking_assert (interval.macro.second <= last.macro.first); |
| 16380 | } |
| 16381 | spans->safe_push (obj: interval); |
| 16382 | } |
| 16383 | |
| 16384 | /* Close out the current linemap interval. The last maps are within |
| 16385 | the interval. */ |
| 16386 | |
| 16387 | void |
| 16388 | loc_spans::close () |
| 16389 | { |
| 16390 | span &interval = spans->last (); |
| 16391 | |
| 16392 | interval.ordinary.second |
| 16393 | = ((line_table->highest_location |
| 16394 | + (loc_one << line_table->default_range_bits)) |
| 16395 | & ~((loc_one << line_table->default_range_bits) - 1)); |
| 16396 | interval.macro.first = LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table); |
| 16397 | dump (dumper::LOCATION) |
| 16398 | && dump ("Closing span %u ordinary:[%K,%K) macro:[%K,%K)" , |
| 16399 | spans->length () - 1, |
| 16400 | interval.ordinary.first,interval.ordinary.second, |
| 16401 | interval.macro.first, interval.macro.second); |
| 16402 | } |
| 16403 | |
| 16404 | /* Given an ordinary location LOC, return the lmap_interval it resides |
| 16405 | in. NULL if it is not in an interval. */ |
| 16406 | |
| 16407 | const loc_spans::span * |
| 16408 | loc_spans::ordinary (location_t loc) |
| 16409 | { |
| 16410 | unsigned len = spans->length (); |
| 16411 | unsigned pos = 0; |
| 16412 | while (len) |
| 16413 | { |
| 16414 | unsigned half = len / 2; |
| 16415 | const span &probe = (*spans)[pos + half]; |
| 16416 | if (loc < probe.ordinary.first) |
| 16417 | len = half; |
| 16418 | else if (loc < probe.ordinary.second) |
| 16419 | return &probe; |
| 16420 | else |
| 16421 | { |
| 16422 | pos += half + 1; |
| 16423 | len = len - (half + 1); |
| 16424 | } |
| 16425 | } |
| 16426 | return NULL; |
| 16427 | } |
| 16428 | |
| 16429 | /* Likewise, given a macro location LOC, return the lmap interval it |
| 16430 | resides in. */ |
| 16431 | |
| 16432 | const loc_spans::span * |
| 16433 | loc_spans::macro (location_t loc) |
| 16434 | { |
| 16435 | unsigned len = spans->length (); |
| 16436 | unsigned pos = 0; |
| 16437 | while (len) |
| 16438 | { |
| 16439 | unsigned half = len / 2; |
| 16440 | const span &probe = (*spans)[pos + half]; |
| 16441 | if (loc >= probe.macro.second) |
| 16442 | len = half; |
| 16443 | else if (loc >= probe.macro.first) |
| 16444 | return &probe; |
| 16445 | else |
| 16446 | { |
| 16447 | pos += half + 1; |
| 16448 | len = len - (half + 1); |
| 16449 | } |
| 16450 | } |
| 16451 | return NULL; |
| 16452 | } |
| 16453 | |
| 16454 | /* Return the ordinary location closest to FROM. */ |
| 16455 | |
| 16456 | static location_t |
| 16457 | ordinary_loc_of (line_maps *lmaps, location_t from) |
| 16458 | { |
| 16459 | while (!IS_ORDINARY_LOC (loc: from)) |
| 16460 | { |
| 16461 | if (IS_ADHOC_LOC (loc: from)) |
| 16462 | from = get_location_from_adhoc_loc (lmaps, from); |
| 16463 | if (from >= LINEMAPS_MACRO_LOWEST_LOCATION (set: lmaps)) |
| 16464 | { |
| 16465 | /* Find the ordinary location nearest FROM. */ |
| 16466 | const line_map *map = linemap_lookup (lmaps, from); |
| 16467 | const line_map_macro *mac_map = linemap_check_macro (map); |
| 16468 | from = mac_map->get_expansion_point_location (); |
| 16469 | } |
| 16470 | } |
| 16471 | return from; |
| 16472 | } |
| 16473 | |
| 16474 | static module_state ** |
| 16475 | get_module_slot (tree name, module_state *parent, bool partition, bool insert) |
| 16476 | { |
| 16477 | module_state_hash::compare_type ct (name, uintptr_t (parent) | partition); |
| 16478 | hashval_t hv = module_state_hash::hash (c: ct); |
| 16479 | |
| 16480 | return modules_hash->find_slot_with_hash (comparable: ct, hash: hv, insert: insert ? INSERT : NO_INSERT); |
| 16481 | } |
| 16482 | |
| 16483 | static module_state * |
| 16484 | get_primary (module_state *parent) |
| 16485 | { |
| 16486 | while (parent->is_partition ()) |
| 16487 | parent = parent->parent; |
| 16488 | |
| 16489 | if (!parent->name) |
| 16490 | // Implementation unit has null name |
| 16491 | parent = parent->parent; |
| 16492 | |
| 16493 | return parent; |
| 16494 | } |
| 16495 | |
| 16496 | /* Find or create module NAME & PARENT in the hash table. */ |
| 16497 | |
| 16498 | module_state * |
| 16499 | get_module (tree name, module_state *parent, bool partition) |
| 16500 | { |
| 16501 | /* We might be given an empty NAME if preprocessing fails to handle |
| 16502 | a header-name token. */ |
| 16503 | if (name && TREE_CODE (name) == STRING_CST |
| 16504 | && TREE_STRING_LENGTH (name) == 0) |
| 16505 | return nullptr; |
| 16506 | |
| 16507 | if (partition) |
| 16508 | { |
| 16509 | if (!parent) |
| 16510 | parent = get_primary (parent: this_module ()); |
| 16511 | |
| 16512 | if (!parent->is_partition () && !parent->flatname) |
| 16513 | parent->set_flatname (); |
| 16514 | } |
| 16515 | |
| 16516 | module_state **slot = get_module_slot (name, parent, partition, insert: true); |
| 16517 | module_state *state = *slot; |
| 16518 | if (!state) |
| 16519 | { |
| 16520 | state = (new (ggc_alloc<module_state> ()) |
| 16521 | module_state (name, parent, partition)); |
| 16522 | *slot = state; |
| 16523 | } |
| 16524 | return state; |
| 16525 | } |
| 16526 | |
| 16527 | /* Process string name PTR into a module_state. */ |
| 16528 | |
static module_state *
get_module (const char *ptr)
{
  /* On DOS based file systems, there is an ambiguity with A:B which can be
     interpreted as a module Module:Partition or Drive:PATH.  Interpret strings
     which clearly starts as pathnames as header-names and everything else is
     treated as a (possibly malformed) named moduled.  */
  if (IS_DIR_SEPARATOR (ptr[ptr[0] == '.'])	// ./FOO or /FOO
#if HAVE_DOS_BASED_FILE_SYSTEM
      || (HAS_DRIVE_SPEC (ptr) && IS_DIR_SEPARATOR (ptr[2])) // A:/FOO
#endif
      || false)
    /* A header name.  */
    return get_module (name: build_string (strlen (s: ptr), ptr));

  bool partition = false;
  module_state *mod = NULL;

  /* Parse the dotted (and optionally once-coloned) module name,
     creating a module_state for each component with the previous
     component as parent.  */
  for (const char *probe = ptr;; probe++)
    if (!*probe || *probe == '.' || *probe == ':')
      {
	/* Empty component => malformed name.  */
	if (probe == ptr)
	  return NULL;

	mod = get_module (name: get_identifier_with_length (ptr, probe - ptr),
			  parent: mod, partition);
	ptr = probe;
	if (*ptr == ':')
	  {
	    /* Only one ':' is permitted (module:partition).  */
	    if (partition)
	      return NULL;
	    partition = true;
	  }

	if (!*ptr++)
	  break;
      }
    else if (!(ISALPHA (*probe) || *probe == '_'
	       || (probe != ptr && ISDIGIT (*probe))))
      /* Components are identifiers: [A-Za-z_][A-Za-z_0-9]*.  */
      return NULL;

  return mod;
}
| 16572 | |
| 16573 | /* Create a new mapper connecting to OPTION. */ |
| 16574 | |
module_client *
make_mapper (location_t loc, class mkdeps *deps)
{
  timevar_start (TV_MODULE_MAPPER);
  /* Prefer the -fmodule-mapper option; fall back to the environment.  */
  const char *option = module_mapper_name;
  if (!option)
    option = getenv (name: "CXX_MODULE_MAPPER" );

  /* NOTE(review): the final argument passes the original argv[0] when
     it differs from progname — presumably so the mapper client can
     identify/locate the invoking driver; confirm against
     open_module_client.  */
  mapper = module_client::open_module_client
    (loc, option, deps, set_repo: &set_cmi_repo,
     (save_decoded_options[0].opt_index == OPT_SPECIAL_program_name)
     && save_decoded_options[0].arg != progname
     ? save_decoded_options[0].arg : nullptr);

  timevar_stop (TV_MODULE_MAPPER);

  return mapper;
}
| 16593 | |
/* Non-zero while a lazy load is in progress (set to the section
   number being loaded, or ~0u); used to detect re-entry.  */
static unsigned lazy_snum;
| 16595 | |
| 16596 | static bool |
| 16597 | recursive_lazy (unsigned snum = ~0u) |
| 16598 | { |
| 16599 | if (lazy_snum) |
| 16600 | { |
| 16601 | error_at (input_location, "recursive lazy load" ); |
| 16602 | return true; |
| 16603 | } |
| 16604 | |
| 16605 | lazy_snum = snum; |
| 16606 | return false; |
| 16607 | } |
| 16608 | |
| 16609 | /* If THIS has an interface dependency on itself, report an error and |
| 16610 | return false. */ |
| 16611 | |
bool
module_state::check_circular_import (location_t from)
{
  if (this == this_module ())
    {
      /* Cannot import the current module.  */
      auto_diagnostic_group d;
      error_at (from, "module %qs depends on itself", get_flatname ());
      /* A header unit has no module declaration to point at.  */
      if (!header_module_p ())
	inform (loc, "module %qs declared here", get_flatname ());
      return false;
    }
  return true;
}
| 16626 | |
| 16627 | /* Module name substitutions. */ |
| 16628 | static vec<module_state *,va_heap> substs; |
| 16629 | |
/* Emit this module's name components into the current mangling,
   recording substitutions (in SUBSTS) so later occurrences can refer
   back to the first.  */

void
module_state::mangle (bool include_partition)
{
  if (subst)
    /* Already mangled once in this mangling session: emit a
       substitution reference instead.  */
    mangle_module_substitution (subst);
  else
    {
      /* Parents first, so components appear outermost-first.  */
      if (parent)
	parent->mangle (include_partition);
      if (include_partition || !is_partition ())
	{
	  // Partitions are significant for global initializer
	  // functions
	  bool partition = is_partition () && !parent->is_partition ();
	  subst = mangle_module_component (id: name, partition);
	  substs.safe_push (obj: this);
	}
    }
}
| 16649 | |
| 16650 | void |
| 16651 | mangle_module (int mod, bool include_partition) |
| 16652 | { |
| 16653 | module_state *imp = (*modules)[mod]; |
| 16654 | |
| 16655 | gcc_checking_assert (!imp->is_header ()); |
| 16656 | |
| 16657 | if (!imp->name) |
| 16658 | /* Set when importing the primary module interface. */ |
| 16659 | imp = imp->parent; |
| 16660 | |
| 16661 | /* Ensure this is actually a module unit. */ |
| 16662 | gcc_checking_assert (imp); |
| 16663 | |
| 16664 | imp->mangle (include_partition); |
| 16665 | } |
| 16666 | |
| 16667 | /* Clean up substitutions. */ |
| 16668 | void |
| 16669 | mangle_module_fini () |
| 16670 | { |
| 16671 | while (substs.length ()) |
| 16672 | substs.pop ()->subst = 0; |
| 16673 | } |
| 16674 | |
| 16675 | /* Announce WHAT about the module. */ |
| 16676 | |
| 16677 | void |
| 16678 | module_state::announce (const char *what) const |
| 16679 | { |
| 16680 | if (noisy_p ()) |
| 16681 | { |
| 16682 | fprintf (stderr, format: " %s:%s" , what, get_flatname ()); |
| 16683 | fflush (stderr); |
| 16684 | } |
| 16685 | } |
| 16686 | |
| 16687 | /* A human-readable README section. The contents of this section to |
| 16688 | not contribute to the CRC, so the contents can change per |
| 16689 | compilation. That allows us to embed CWD, hostname, build time and |
| 16690 | what not. It is a STRTAB that may be extracted with: |
| 16691 | readelf -pgnu.c++.README $(module).gcm */ |
| 16692 | |
void
module_state::write_readme (elf_out *to, cpp_reader *reader, const char *dialect)
{
  bytes_out readme (to);

  /* No CRC: this section may legitimately differ between otherwise
     identical compilations.  */
  readme.begin (need_crc: false);

  /* What kind of module unit this CMI is.  */
  readme.printf (format: "GNU C++ %s",
		 is_header () ? "header unit"
		 : !is_partition () ? "primary interface"
		 : is_interface () ? "interface partition"
		 : "internal partition");

  /* Compiler's version.  */
  readme.printf (format: "compiler: %s", version_string);

  /* Module format version.  */
  verstr_t string;
  version2string (MODULE_VERSION, out&: string);
  readme.printf (format: "version: %s", string);

  /* Module information.  */
  readme.printf (format: "module: %s", get_flatname ());
  readme.printf (format: "source: %s", main_input_filename);
  readme.printf (format: "dialect: %s", dialect);
  /* Extensions in use, space-separated when several apply.  */
  if (extensions)
    readme.printf (format: "extensions: %s%s%s",
		   extensions & SE_OPENMP ? "-fopenmp"
		   : extensions & SE_OPENMP_SIMD ? "-fopenmp-simd" : "",
		   (extensions & SE_OPENACC)
		   && (extensions & (SE_OPENMP | SE_OPENMP_SIMD))
		   ? " " : "",
		   extensions & SE_OPENACC ? "-fopenacc" : "");

  /* The following fields could be expected to change between
     otherwise identical compilations.  Consider a distributed build
     system.  We should have a way of overriding that.  */
  if (char *cwd = getcwd (NULL, size: 0))
    {
      readme.printf (format: "cwd: %s", cwd);
      free (ptr: cwd);
    }
  readme.printf (format: "repository: %s", cmi_repo ? cmi_repo : ".");
#if NETWORKING
  {
    char hostname[64];
    if (!gethostname (hostname, sizeof (hostname)))
      readme.printf ("host: %s", hostname);
  }
#endif
  {
    /* This of course will change!  Record the (possibly overridden)
       build timestamp, in UTC and, if dynamic, local time too.  */
    time_t stampy;
    auto kind = cpp_get_date (reader, &stampy);
    if (kind != CPP_time_kind::UNKNOWN)
      {
	struct tm *time;

	time = gmtime (timer: &stampy);
	readme.print_time (kind: "build", time, tz: "UTC");

	if (kind == CPP_time_kind::DYNAMIC)
	  {
	    time = localtime (timer: &stampy);
	    readme.print_time (kind: "local", time,
#if defined (__USE_MISC) || defined (__USE_BSD) /* Is there a better way?  */
			       tz: time->tm_zone
#else
			       ""
#endif
			       );
	  }
      }
  }

  /* Its direct imports.  */
  for (unsigned ix = 1; ix < modules->length (); ix++)
    {
      module_state *state = (*modules)[ix];

      if (state->is_direct ())
	readme.printf (format: "%s: %s %s", state->exported_p ? "export" : "import",
		       state->get_flatname (), state->filename);
    }

  readme.end (sink: to, name: to->name (MOD_SNAME_PFX ".README"), NULL);
}
| 16780 | |
| 16781 | /* Sort environment var names in reverse order. */ |
| 16782 | |
static int
env_var_cmp (const void *a_, const void *b_)
{
  /* Compare only the variable names — the part before '=' (or NUL) —
     and order them in descending byte order.  */
  const unsigned char *a = *(const unsigned char *const *)a_;
  const unsigned char *b = *(const unsigned char *const *)b_;

  for (unsigned ix = 0;; ix++)
    {
      bool a_done = !a[ix] || a[ix] == '=';
      bool b_done = !b[ix] || b[ix] == '=';

      if (a[ix] != b[ix])
	{
	  if (a_done && b_done)
	    /* Same name; one terminated by NUL, one by '='.  */
	    return 0;
	  if (!a_done && !b_done)
	    /* Reverse of the natural byte ordering.  */
	    return a[ix] < b[ix] ? +1 : -1;
	  /* A name that is a prefix of the other sorts after it.  */
	  return a_done ? +1 : -1;
	}
      if (a_done)
	/* Identical names.  */
	return 0;
    }
}
| 16811 | |
| 16812 | /* Write the environment. It is a STRTAB that may be extracted with: |
| 16813 | readelf -pgnu.c++.ENV $(module).gcm */ |
| 16814 | |
void
module_state::write_env (elf_out *to)
{
  vec<const char *> vars;
  vars.create (nelems: 20);

  extern char **environ;
  while (const char *var = environ[vars.length ()])
    vars.safe_push (obj: var);
  /* Sorted in reverse so that popping below emits them in ascending
     name order.  */
  vars.qsort (env_var_cmp);

  /* No CRC: the environment legitimately varies per compilation.  */
  bytes_out env (to);
  env.begin (need_crc: false);
  while (vars.length ())
    env.printf (format: "%s", vars.pop ());
  env.end (sink: to, name: to->name (MOD_SNAME_PFX ".ENV"), NULL);

  vars.release ();
}
| 16834 | |
| 16835 | /* Write the direct or indirect imports. |
| 16836 | u:N |
| 16837 | { |
| 16838 | u:index |
| 16839 | s:name |
| 16840 | u32:crc |
| 16841 | s:filename (direct) |
| 16842 | u:exported (direct) |
| 16843 | } imports[N] |
| 16844 | */ |
| 16845 | |
void
module_state::write_imports (bytes_out &sec, bool direct)
{
  /* First pass: count the imports of the requested directness.  Only
     modules with a non-zero remap are written.  */
  unsigned count = 0;

  for (unsigned ix = 1; ix < modules->length (); ix++)
    {
      module_state *imp = (*modules)[ix];

      if (imp->remap && imp->is_direct () == direct)
	count++;
    }

  /* A module unit must have at least one direct import.  */
  gcc_assert (!direct || count);

  sec.u (v: count);
  for (unsigned ix = 1; ix < modules->length (); ix++)
    {
      module_state *imp = (*modules)[ix];

      if (imp->remap && imp->is_direct () == direct)
	{
	  dump () && dump ("Writing %simport:%u->%u %M (crc=%x)",
			   !direct ? "indirect "
			   : imp->exported_p ? "exported " : "",
			   ix, imp->remap, imp, imp->crc);
	  sec.u (v: imp->remap);
	  sec.str (ptr: imp->get_flatname ());
	  sec.u32 (val: imp->crc);
	  if (direct)
	    {
	      /* Direct imports additionally carry location, filename
		 and exportedness: +1 exported, 0 purview, -1 GMF.  */
	      write_location (sec, imp->imported_from ());
	      sec.str (ptr: imp->filename);
	      int exportedness = 0;
	      if (imp->exported_p)
		exportedness = +1;
	      else if (!imp->is_purview_direct ())
		exportedness = -1;
	      sec.i (v: exportedness);
	    }
	}
    }
}
| 16889 | |
| 16890 | /* READER, LMAPS != NULL == direct imports, |
| 16891 | == NUL == indirect imports. */ |
| 16892 | |
unsigned
module_state::read_imports (bytes_in &sec, cpp_reader *reader, line_maps *lmaps)
{
  unsigned count = sec.u ();
  unsigned loaded = 0;

  while (count--)
    {
      /* Remap index must be in range, non-zero (zero is this TU) and
	 not already assigned.  */
      unsigned ix = sec.u ();
      if (ix >= slurp->remap->length () || !ix || (*slurp->remap)[ix])
	{
	  sec.set_overrun ();
	  break;
	}

      const char *name = sec.str (NULL);
      module_state *imp = get_module (ptr: name);
      unsigned crc = sec.u32 ();
      int exportedness = 0;

      /* If the import is a partition, it must be the same primary
	 module as this TU.  */
      if (imp && imp->is_partition () &&
	  (!named_module_p ()
	   || (get_primary (parent: this_module ()) != get_primary (parent: imp))))
	imp = NULL;

      if (!imp)
	sec.set_overrun ();
      if (sec.get_overrun ())
	break;

      if (lmaps)
	{
	  /* A direct import, maybe load it.  */
	  location_t floc = read_location (sec);
	  const char *fname = sec.str (NULL);
	  exportedness = sec.i ();

	  if (sec.get_overrun ())
	    break;

	  if (!imp->check_circular_import (from: floc))
	    continue;

	  if (imp->loadedness == ML_NONE)
	    {
	      /* Not yet loaded: record its metadata and import it
		 now.  */
	      imp->loc = floc;
	      imp->crc = crc;
	      if (!imp->get_flatname ())
		imp->set_flatname ();

	      unsigned n = dump.push (m: imp);

	      if (!imp->filename && fname)
		imp->filename = xstrdup (fname);

	      if (imp->is_partition ())
		dump () && dump ("Importing elided partition %M", imp);

	      if (!imp->do_import (reader, outermost: false))
		imp = NULL;
	      dump.pop (n);
	      if (!imp)
		continue;
	    }

	  if (is_partition ())
	    {
	      /* A partition sees its siblings' imports as direct.  */
	      if (!imp->is_direct ())
		imp->directness = MD_PARTITION_DIRECT;
	      if (exportedness > 0)
		imp->exported_p = true;
	    }
	}
      else
	{
	  /* An indirect import, find it, it should already be here.  */
	  if (imp->loadedness == ML_NONE)
	    {
	      error_at (loc, "indirect import %qs is not already loaded", name);
	      continue;
	    }
	}

      /* The CRC recorded at write time must match the CMI we actually
	 loaded.  */
      if (imp->crc != crc)
	error_at (loc, "import %qs has CRC mismatch", imp->get_flatname ());

      /* Remap slot: module number shifted up, low bit flags a direct
	 import.  */
      (*slurp->remap)[ix] = (imp->mod << 1) | (lmaps != NULL);

      if (lmaps && exportedness >= 0)
	set_import (imp, is_export: bool (exportedness));
      dump () && dump ("Found %simport:%u %M->%u", !lmaps ? "indirect "
		       : exportedness > 0 ? "exported "
		       : exportedness < 0 ? "gmf" : "", ix, imp,
		       imp->mod);
      loaded++;
    }

  return loaded;
}
| 16994 | |
| 16995 | /* Write the import table to MOD_SNAME_PFX.imp. */ |
| 16996 | |
| 16997 | void |
| 16998 | module_state::write_imports (elf_out *to, unsigned *crc_ptr) |
| 16999 | { |
| 17000 | dump () && dump ("Writing imports" ); |
| 17001 | dump.indent (); |
| 17002 | |
| 17003 | bytes_out sec (to); |
| 17004 | sec.begin (); |
| 17005 | |
| 17006 | write_imports (sec, direct: true); |
| 17007 | write_imports (sec, direct: false); |
| 17008 | |
| 17009 | sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".imp" ), crc_ptr); |
| 17010 | dump.outdent (); |
| 17011 | } |
| 17012 | |
/* Read the import table back from MOD_SNAME_PFX.imp.  Returns false
   on any stream error.  */

bool
module_state::read_imports (cpp_reader *reader, line_maps *lmaps)
{
  bytes_in sec;

  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".imp"))
    return false;

  dump () && dump ("Reading %u imports", slurp->remap->length () - 1);
  dump.indent ();

  /* Read the imports.  */
  unsigned direct = read_imports (sec, reader, lmaps);
  unsigned indirect = read_imports (sec, NULL, NULL);
  /* Every remap slot (bar slot 0, this TU) must have been filled.  */
  if (direct + indirect + 1 != slurp->remap->length ())
    from ()->set_error (elf::E_BAD_IMPORT);

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;
  return true;
}
| 17035 | |
| 17036 | /* We're the primary module interface, but have partitions. Document |
| 17037 | them so that non-partition module implementation units know which |
| 17038 | have already been loaded. */ |
| 17039 | |
void
module_state::write_partitions (elf_out *to, unsigned count, unsigned *crc_ptr)
{
  dump () && dump ("Writing %u elided partitions", count);
  dump.indent ();

  bytes_out sec (to);
  sec.begin ();

  /* For each known partition record name, CRC, import location (if
     directly imported) and filename.  */
  for (unsigned ix = 1; ix != modules->length (); ix++)
    {
      module_state *imp = (*modules)[ix];
      if (imp->is_partition ())
	{
	  dump () && dump ("Writing elided partition %M (crc=%x)",
			   imp, imp->crc);
	  sec.str (ptr: imp->get_flatname ());
	  sec.u32 (val: imp->crc);
	  write_location (sec, imp->is_direct ()
			  ? imp->imported_from () : UNKNOWN_LOCATION);
	  sec.str (ptr: imp->filename);
	}
    }

  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".prt"), crc_ptr);
  dump.outdent ();
}
| 17067 | |
/* Read the COUNT elided partitions recorded by write_partitions,
   registering each one's location, CRC and filename.  Returns false
   on any stream error.  */

bool
module_state::read_partitions (unsigned count)
{
  bytes_in sec;
  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".prt"))
    return false;

  dump () && dump ("Reading %u elided partitions", count);
  dump.indent ();

  while (count--)
    {
      const char *name = sec.str (NULL);
      unsigned crc = sec.u32 ();
      location_t floc = read_location (sec);
      const char *fname = sec.str (NULL);

      if (sec.get_overrun ())
	break;

      dump () && dump ("Reading elided partition %s (crc=%x)", name, crc);

      module_state *imp = get_module (ptr: name);
      if (!imp	/* Partition should be ...  */
	  || !imp->is_partition () /* a partition ...  */
	  || imp->loadedness != ML_NONE  /* that is not yet loaded ...  */
	  || get_primary (parent: imp) != this) /* whose primary is this.  */
	{
	  sec.set_overrun ();
	  break;
	}

      if (!imp->has_location ())
	imp->loc = floc;
      imp->crc = crc;
      if (!imp->filename && fname[0])
	imp->filename = xstrdup (fname);
    }

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;
  return true;
}
| 17112 | |
| 17113 | /* Data for config reading and writing. */ |
| 17114 | struct module_state_config { |
| 17115 | const char *dialect_str = get_dialect (); |
| 17116 | line_map_uint_t ordinary_locs = 0; |
| 17117 | line_map_uint_t macro_locs = 0; |
| 17118 | unsigned num_imports = 0; |
| 17119 | unsigned num_partitions = 0; |
| 17120 | unsigned num_entities = 0; |
| 17121 | unsigned loc_range_bits = 0; |
| 17122 | unsigned active_init = 0; |
| 17123 | |
| 17124 | static void release () |
| 17125 | { |
| 17126 | XDELETEVEC (dialect); |
| 17127 | dialect = NULL; |
| 17128 | } |
| 17129 | |
| 17130 | private: |
| 17131 | static const char *get_dialect (); |
| 17132 | static char *dialect; |
| 17133 | }; |
| 17134 | |
/* Out-of-line storage for the lazily-computed dialect string.  */
char *module_state_config::dialect;
| 17136 | |
| 17137 | /* Generate a string of the significant compilation options. |
| 17138 | Generally assume the user knows what they're doing, in the same way |
| 17139 | that object files can be mixed. */ |
| 17140 | |
const char *
module_state_config::get_dialect ()
{
  /* Computed once and cached; release () frees it.  */
  if (!dialect)
    dialect = concat (get_cxx_dialect_name (dialect: cxx_dialect),
		      /* C++ implies these, only show if disabled.  */
		      flag_exceptions ? "" : "/no-exceptions",
		      flag_rtti ? "" : "/no-rtti",
		      flag_new_inheriting_ctors ? "" : "/old-inheriting-ctors",
		      /* C++ 20 implies concepts and coroutines.  */
		      cxx_dialect < cxx20 && flag_concepts ? "/concepts" : "",
		      (cxx_dialect < cxx20 && flag_coroutines
		       ? "/coroutines" : ""),
		      flag_module_implicit_inline ? "/implicit-inline" : "",
		      flag_contracts ? "/contracts" : "",
		      NULL);

  return dialect;
}
| 17160 | |
| 17161 | /* Contents of a cluster. */ |
/* Contents of a cluster.  */
enum cluster_tag {
  ct_decl,	/* A decl.  */
  ct_defn,	/* A definition.  */
  ct_bind,	/* A binding.  */
  ct_hwm	/* High water mark.  */
};
| 17168 | |
| 17169 | /* Binding modifiers. */ |
/* Binding modifiers, OR-able flags on a ct_bind entry.  */
enum ct_bind_flags
{
  cbf_export = 0x1,	/* An exported decl.  */
  cbf_hidden = 0x2,	/* A hidden (friend) decl.  */
  cbf_using = 0x4,	/* A using decl.  */
  cbf_internal = 0x8,	/* A TU-local decl.  */
};
| 17177 | |
| 17178 | /* DEP belongs to a different cluster, seed it to prevent |
| 17179 | unfortunately timed duplicate import. */ |
| 17180 | // FIXME: QOI For inter-cluster references we could just only pick |
| 17181 | // one entity from an earlier cluster. Even better track |
| 17182 | // dependencies between earlier clusters |
| 17183 | |
void
module_state::intercluster_seed (trees_out &sec, unsigned index_hwm, depset *dep)
{
  if (dep->is_tu_local ())
    /* We only stream placeholders for TU-local entities anyway.  */;
  else if (dep->is_import () || dep->cluster < index_hwm)
    {
      /* DEP is an import or lives in an earlier cluster: stream a
	 reference now, unless it has already been visited in this
	 section.  */
      tree ent = dep->get_entity ();
      if (!TREE_VISITED (ent))
	{
	  sec.tree_node (t: ent);
	  dump (dumper::CLUSTER)
	    && dump ("Seeded %s %N",
		     dep->is_import () ? "import" : "intercluster", ent);
	}
    }
}
| 17201 | |
/* Write the cluster of depsets in SCC[0-SIZE) as one section of TO.
   dep->section -> section number
   dep->cluster -> entity number
   COUNTS[MSC_entities] advances as entity numbers are handed out.
   The section is written in three passes: inter-cluster/import seeds,
   then declarations (and bindings), then definitions.  Returns the
   number of bytes written.  */

unsigned
module_state::write_cluster (elf_out *to, depset *scc[], unsigned size,
			     depset::hash &table, unsigned *counts,
			     unsigned *crc_ptr)
{
  dump () && dump ("Writing section:%u %u depsets" , table.section, size);
  dump.indent ();

  trees_out sec (to, this, table, table.section);
  sec.begin ();
  /* Entity numbers below this belong to earlier clusters.  */
  unsigned index_lwm = counts[MSC_entities];

  /* Determine entity numbers, mark for writing.   */
  dump (dumper::CLUSTER) && dump ("Cluster members:" ) && (dump.indent (), true);
  for (unsigned ix = 0; ix != size; ix++)
    {
      depset *b = scc[ix];

      switch (b->get_entity_kind ())
	{
	default:
	  gcc_unreachable ();

	case depset::EK_BINDING:
	  {
	    dump (dumper::CLUSTER)
	      && dump ("[%u]=%s %P" , ix, b->entity_kind_name (),
		       b->get_entity (), b->get_name ());
	    /* A binding's deps[0] is the namespace containing it.  */
	    depset *ns_dep = b->deps[0];
	    gcc_checking_assert (ns_dep->get_entity_kind ()
				 == depset::EK_NAMESPACE
				 && ns_dep->get_entity () == b->get_entity ());
	    for (unsigned jx = b->deps.length (); --jx;)
	      {
		depset *dep = b->deps[jx];
		// We could be declaring something that is also a
		// (merged) import
		gcc_checking_assert (dep->is_import ()
				     || TREE_VISITED (dep->get_entity ())
				     || (dep->get_entity_kind ()
					 == depset::EK_USING)
				     || (dep->get_entity_kind ()
					 == depset::EK_TU_LOCAL));
	      }
	  }
	  break;

	case depset::EK_DECL:
	case depset::EK_SPECIALIZATION:
	case depset::EK_PARTIAL:
	  /* Allocate the next entity number and mark the decl (and
	     its definition, if it has one) for streaming.  */
	  b->cluster = counts[MSC_entities]++;
	  sec.mark_declaration (decl: b->get_entity (), do_defn: b->has_defn ());
	  /* FALLTHROUGH */

	case depset::EK_USING:
	case depset::EK_TU_LOCAL:
	  gcc_checking_assert (!b->is_import ()
			       && !b->is_unreached ());
	  dump (dumper::CLUSTER)
	    && dump ("[%u]=%s %s %N" , ix, b->entity_kind_name (),
		     b->has_defn () ? "definition" : "declaration" ,
		     b->get_entity ());
	  break;
	}
    }
  dump (dumper::CLUSTER) && (dump.outdent (), true);

  /* Ensure every out-of-cluster decl is referenced before we start
     streaming.  We must do both imports *and* earlier clusters,
     because the latter could reach into the former and cause a
     duplicate loop.   */
  sec.set_importing (+1);
  for (unsigned ix = 0; ix != size; ix++)
    {
      depset *b = scc[ix];
      for (unsigned jx = b->is_special (); jx != b->deps.length (); jx++)
	{
	  depset *dep = b->deps[jx];

	  if (dep->is_binding ())
	    {
	      /* Seed each entity the binding names (deps[1..]).  */
	      for (unsigned ix = dep->deps.length (); --ix;)
		{
		  depset *bind = dep->deps[ix];
		  if (bind->get_entity_kind () == depset::EK_USING)
		    bind = bind->deps[1];

		  intercluster_seed (sec, index_hwm: index_lwm, dep: bind);
		}
	      /* Also check the namespace itself. */
	      dep = dep->deps[0];
	    }

	  intercluster_seed (sec, index_hwm: index_lwm, dep);
	}
    }
  /* A NULL tree terminates the seed list for the reader.  */
  sec.tree_node (NULL_TREE);
  /* We're done importing now. */
  sec.set_importing (-1);

  /* Write non-definitions. */
  for (unsigned ix = 0; ix != size; ix++)
    {
      depset *b = scc[ix];
      tree decl = b->get_entity ();
      switch (b->get_entity_kind ())
	{
	default:
	  gcc_unreachable ();
	  break;

	case depset::EK_BINDING:
	  {
	    gcc_assert (TREE_CODE (decl) == NAMESPACE_DECL);
	    dump () && dump ("Depset:%u binding %C:%P" , ix, TREE_CODE (decl),
			     decl, b->get_name ());
	    sec.u (v: ct_bind);
	    sec.tree_node (t: decl);
	    sec.tree_node (t: b->get_name ());

	    /* Write in reverse order, so reading will see the exports
	       first, thus building the overload chain will be
	       optimized.  */
	    for (unsigned jx = b->deps.length (); --jx;)
	      {
		depset *dep = b->deps[jx];
		tree bound = dep->get_entity ();
		unsigned flags = 0;
		if (dep->get_entity_kind () == depset::EK_TU_LOCAL)
		  flags |= cbf_internal;
		else if (dep->get_entity_kind () == depset::EK_USING)
		  {
		    tree ovl = bound;
		    bound = OVL_FUNCTION (bound);
		    if (!(TREE_CODE (bound) == CONST_DECL
			  && UNSCOPED_ENUM_P (TREE_TYPE (bound))
			  && decl == TYPE_NAME (TREE_TYPE (bound))))
		      /* An unscoped enumerator in its enumeration's
			 scope is not a using.  */
		      flags |= cbf_using;
		    if (OVL_EXPORT_P (ovl))
		      flags |= cbf_export;
		  }
		else
		  {
		    /* An implicit typedef must be at one. */
		    gcc_assert (!DECL_IMPLICIT_TYPEDEF_P (bound) || jx == 1);
		    if (dep->is_hidden ())
		      flags |= cbf_hidden;
		    else if (DECL_MODULE_EXPORT_P (STRIP_TEMPLATE (bound)))
		      flags |= cbf_export;
		  }

		gcc_checking_assert (DECL_P (bound));

		sec.i (v: flags);
		if (flags & cbf_internal)
		  {
		    /* TU-local entities are streamed as just a name
		       and location, for diagnostics in importers.  */
		    sec.tree_node (t: name_for_tu_local_decl (t: bound));
		    write_location (sec, DECL_SOURCE_LOCATION (bound));
		  }
		else
		  sec.tree_node (t: bound);
	      }

	    /* Terminate the list. */
	    sec.i (v: -1);
	  }
	  break;

	case depset::EK_USING:
	case depset::EK_TU_LOCAL:
	  /* These were written as part of their binding above.  */
	  dump () && dump ("Depset:%u %s %C:%N" , ix, b->entity_kind_name (),
			   TREE_CODE (decl), decl);
	  break;

	case depset::EK_SPECIALIZATION:
	case depset::EK_PARTIAL:
	case depset::EK_DECL:
	  dump () && dump ("Depset:%u %s entity:%u %C:%N" , ix,
			   b->entity_kind_name (), b->cluster,
			   TREE_CODE (decl), decl);

	  sec.u (v: ct_decl);
	  sec.tree_node (t: decl);

	  dump () && dump ("Wrote declaration entity:%u %C:%N" ,
			   b->cluster, TREE_CODE (decl), decl);
	  break;
	}
    }

  /* The depset whose decl will name the section (below).  */
  depset *namer = NULL;

  /* Write out definitions  */
  for (unsigned ix = 0; ix != size; ix++)
    {
      depset *b = scc[ix];
      tree decl = b->get_entity ();
      switch (b->get_entity_kind ())
	{
	default:
	  break;

	case depset::EK_SPECIALIZATION:
	case depset::EK_PARTIAL:
	case depset::EK_DECL:
	  if (!namer)
	    namer = b;

	  if (b->has_defn ())
	    {
	      sec.u (v: ct_defn);
	      sec.tree_node (t: decl);
	      dump () && dump ("Writing definition %N" , decl);
	      sec.write_definition (decl, refs_tu_local: b->refs_tu_local ());

	      /* Prefer a defined entity as the namer.  */
	      if (!namer->has_defn ())
		namer = b;
	    }
	  break;
	}
    }

  /* We don't find the section by name.  Use depset's decl's name for
     human friendliness.  */
  unsigned name = 0;
  tree naming_decl = NULL_TREE;
  if (namer)
    {
      naming_decl = namer->get_entity ();
      if (namer->get_entity_kind () == depset::EK_USING)
	/* This unfortunately names the section from the target of the
	   using decl.  But the name is only a guide, so Do Not Care.  */
	naming_decl = OVL_FUNCTION (naming_decl);
      if (DECL_IMPLICIT_TYPEDEF_P (naming_decl))
	/* Lose any anonymousness.  */
	naming_decl = TYPE_NAME (TREE_TYPE (naming_decl));
      name = to->qualified_name (decl: naming_decl, is_defn: namer->has_defn ());
    }

  unsigned bytes = sec.pos;
  unsigned snum = sec.end (sink: to, name, crc_ptr);

  /* The dependency graph should have predicted this section number
     for every member of the cluster.  */
  for (unsigned ix = size; ix--;)
    gcc_checking_assert (scc[ix]->section == snum);

  dump.outdent ();
  dump () && dump ("Wrote section:%u named-by:%N" , table.section, naming_decl);

  return bytes;
}
| 17459 | |
/* Read a cluster from section SNUM.  Reverses write_cluster: consume
   the seed list, then the tagged items, then post-process any read
   function definitions.  Returns false on corruption.  */

bool
module_state::read_cluster (unsigned snum)
{
  trees_in sec (this);

  if (!sec.begin (loc, source: from (), snum))
    return false;

  dump () && dump ("Reading section:%u" , snum);
  dump.indent ();

  /* We care about structural equality. */
  comparing_dependent_aliases++;

  /* First seed the imports. */
  while (tree import = sec.tree_node ())
    dump (dumper::CLUSTER) && dump ("Seeded import %N" , import);

  while (!sec.get_overrun () && sec.more_p ())
    {
      unsigned ct = sec.u ();
      switch (ct)
	{
	default:
	  sec.set_overrun ();
	  break;

	case ct_bind:
	  /* A set of namespace bindings. */
	  {
	    tree ns = sec.tree_node ();
	    tree name = sec.tree_node ();
	    tree decls = NULL_TREE;
	    tree visible = NULL_TREE;
	    tree internal = NULL_TREE;
	    tree type = NULL_TREE;
	    bool dedup = false;
	    bool global_p = is_header ();

	    /* We rely on the bindings being in the reverse order of
	       the resulting overload set.  */
	    for (;;)
	      {
		int flags = sec.i ();
		if (flags < 0)
		  break;

		/* Reject flag combinations the writer never emits
		   (see ct_bind_flags).  */
		if ((flags & cbf_hidden)
		    && (flags & (cbf_using | cbf_export)))
		  sec.set_overrun ();
		if ((flags & cbf_internal)
		    && flags != cbf_internal)
		  sec.set_overrun ();

		if (flags & cbf_internal)
		  {
		    /* Only a name and location were streamed; build a
		       placeholder node for diagnostics.  */
		    tree name = sec.tree_node ();
		    location_t loc = read_location (sec);
		    if (sec.get_overrun ())
		      break;

		    tree decl = make_node (TU_LOCAL_ENTITY);
		    TU_LOCAL_ENTITY_NAME (decl) = name;
		    TU_LOCAL_ENTITY_LOCATION (decl) = loc;
		    internal = tree_cons (NULL_TREE, decl, internal);
		    continue;
		  }

		tree decl = sec.tree_node ();
		if (sec.get_overrun ())
		  break;

		if (!global_p)
		  {
		    /* Check if the decl could require GM merging.  */
		    tree orig = get_originating_module_decl (decl);
		    tree inner = STRIP_TEMPLATE (orig);
		    if (!DECL_LANG_SPECIFIC (inner)
			|| !DECL_MODULE_ATTACH_P (inner))
		      global_p = true;
		  }

		if (decls && TREE_CODE (decl) == TYPE_DECL)
		  {
		    /* Stat hack.  */
		    if (type || !DECL_IMPLICIT_TYPEDEF_P (decl))
		      sec.set_overrun ();

		    if (flags & cbf_using)
		      {
			type = build_lang_decl_loc (UNKNOWN_LOCATION,
						    USING_DECL,
						    DECL_NAME (decl),
						    NULL_TREE);
			USING_DECL_DECLS (type) = decl;
			USING_DECL_SCOPE (type) = CP_DECL_CONTEXT (decl);
			DECL_CONTEXT (type) = ns;

			DECL_MODULE_PURVIEW_P (type) = true;
			if (flags & cbf_export)
			  DECL_MODULE_EXPORT_P (type) = true;
		      }
		    else
		      type = decl;
		  }
		else
		  {
		    if ((flags & cbf_using) &&
			!DECL_DECLARES_FUNCTION_P (decl))
		      {
			/* We should only see a single non-function using-decl
			   for a binding; more than that would clash.  */
			if (decls)
			  sec.set_overrun ();

			/* FIXME: Propagate the location of the using-decl
			   for use in diagnostics.  */
			decls = build_lang_decl_loc (UNKNOWN_LOCATION,
						     USING_DECL,
						     DECL_NAME (decl),
						     NULL_TREE);
			USING_DECL_DECLS (decls) = decl;
			/* We don't currently record the actual scope of the
			   using-declaration, but this approximation should
			   generally be good enough.  */
			USING_DECL_SCOPE (decls) = CP_DECL_CONTEXT (decl);
			DECL_CONTEXT (decls) = ns;

			DECL_MODULE_PURVIEW_P (decls) = true;
			if (flags & cbf_export)
			  DECL_MODULE_EXPORT_P (decls) = true;
		      }
		    else if (decls
			     || (flags & (cbf_hidden | cbf_using))
			     || DECL_FUNCTION_TEMPLATE_P (decl))
		      {
			decls = ovl_make (fn: decl, next: decls);
			if (flags & cbf_using)
			  {
			    dedup = true;
			    OVL_USING_P (decls) = true;
			    OVL_PURVIEW_P (decls) = true;
			    if (flags & cbf_export)
			      OVL_EXPORT_P (decls) = true;
			  }

			if (flags & cbf_hidden)
			  OVL_HIDDEN_P (decls) = true;
			else if (dedup)
			  OVL_DEDUP_P (decls) = true;
		      }
		    else
		      decls = decl;

		    /* Track the outermost decl that should be visible
		       to name lookup in importers.  */
		    if (flags & cbf_export
			|| (!(flags & cbf_hidden)
			    && (is_module () || is_partition ())))
		      visible = decls;
		  }
	      }

	    /* An entirely empty binding should never be streamed.  */
	    if (!decls && !internal)
	      sec.set_overrun ();

	    if (sec.get_overrun ())
	      break; /* Bail. */

	    dump () && dump ("Binding of %P" , ns, name);
	    if (!set_module_binding (ctx: ns, name, mod, global_p,
				     partition_p: is_module () || is_partition (),
				     value: decls, type, visible, internal))
	      sec.set_overrun ();
	  }
	  break;

	case ct_decl:
	  /* A decl. */
	  {
	    tree decl = sec.tree_node ();
	    dump () && dump ("Read declaration of %N" , decl);
	  }
	  break;

	case ct_defn:
	  {
	    tree decl = sec.tree_node ();
	    dump () && dump ("Reading definition of %N" , decl);
	    sec.read_definition (decl);
	  }
	  break;
	}
    }

  /* When lazy loading is in effect, we can be in the middle of
     parsing or instantiating a function.  Save it away.
     push_function_context does too much work.   */
  tree old_cfd = current_function_decl;
  struct function *old_cfun = cfun;
  for (const post_process_data& pdata : sec.post_process ())
    {
      tree decl = pdata.decl;

      bool abstract = false;
      if (TREE_CODE (decl) == TEMPLATE_DECL)
	{
	  abstract = true;
	  decl = DECL_TEMPLATE_RESULT (decl);
	}

      /* Recreate the function context that parsing the body would
	 have established.  */
      current_function_decl = decl;
      allocate_struct_function (decl, abstract);
      cfun->language = ggc_cleared_alloc<language_function> ();
      cfun->language->base.x_stmt_tree.stmts_are_full_exprs_p = 1;
      cfun->function_start_locus = pdata.start_locus;
      cfun->function_end_locus = pdata.end_locus;
      cfun->language->returns_value = pdata.returns_value;
      cfun->language->returns_null = pdata.returns_null;
      cfun->language->returns_abnormally = pdata.returns_abnormally;
      cfun->language->infinite_loop = pdata.infinite_loop;
      cfun->coroutine_component = DECL_COROUTINE_P (decl);

      /* Make sure we emit explicit instantiations.
	 FIXME do we want to do this in expand_or_defer_fn instead?  */
      if (DECL_EXPLICIT_INSTANTIATION (decl)
	  && !DECL_EXTERNAL (decl))
	setup_explicit_instantiation_definition_linkage (decl);

      if (abstract)
	;
      else if (DECL_MAYBE_IN_CHARGE_CDTOR_P (decl))
	vec_safe_push (v&: post_load_decls, obj: decl);
      else
	{
	  bool aggr = aggregate_value_p (DECL_RESULT (decl), decl);
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = aggr;
#endif
	  cfun->returns_struct = aggr;
	  expand_or_defer_fn (decl);

	  /* If we first see this function after at_eof, it doesn't get
	     note_vague_linkage_fn from tentative_decl_linkage, so the loop in
	     c_parse_final_cleanups won't consider it.  But with DECL_COMDAT we
	     can just clear DECL_EXTERNAL and let cgraph decide.
	     FIXME handle this outside module.cc after GCC 15.  */
	  if (at_eof && DECL_COMDAT (decl) && DECL_EXTERNAL (decl)
	      && DECL_NOT_REALLY_EXTERN (decl))
	    DECL_EXTERNAL (decl) = false;
	}

    }
  for (const tree& type : sec.post_process_type ())
    {
      /* Attempt to complete an array type now in case its element type
	 had a definition streamed later in the cluster.  */
      gcc_checking_assert (TREE_CODE (type) == ARRAY_TYPE);
      complete_type (type);
    }
  set_cfun (new_cfun: old_cfun);
  current_function_decl = old_cfd;
  comparing_dependent_aliases--;

  dump.outdent ();
  dump () && dump ("Read section:%u" , snum);

  loaded_clusters++;

  if (!sec.end (src: from ()))
    return false;

  return true;
}
| 17734 | |
| 17735 | void |
| 17736 | module_state::write_namespace (bytes_out &sec, depset *dep) |
| 17737 | { |
| 17738 | unsigned ns_num = dep->cluster; |
| 17739 | unsigned ns_import = 0; |
| 17740 | |
| 17741 | if (dep->is_import ()) |
| 17742 | ns_import = dep->section; |
| 17743 | else if (dep->get_entity () != global_namespace) |
| 17744 | ns_num++; |
| 17745 | |
| 17746 | sec.u (v: ns_import); |
| 17747 | sec.u (v: ns_num); |
| 17748 | } |
| 17749 | |
| 17750 | tree |
| 17751 | module_state::read_namespace (bytes_in &sec) |
| 17752 | { |
| 17753 | unsigned ns_import = sec.u (); |
| 17754 | unsigned ns_num = sec.u (); |
| 17755 | tree ns = NULL_TREE; |
| 17756 | |
| 17757 | if (ns_import || ns_num) |
| 17758 | { |
| 17759 | if (!ns_import) |
| 17760 | ns_num--; |
| 17761 | |
| 17762 | if (unsigned origin = slurp->remap_module (owner: ns_import)) |
| 17763 | { |
| 17764 | module_state *from = (*modules)[origin]; |
| 17765 | if (ns_num < from->entity_num) |
| 17766 | { |
| 17767 | binding_slot &slot = (*entity_ary)[from->entity_lwm + ns_num]; |
| 17768 | |
| 17769 | if (!slot.is_lazy ()) |
| 17770 | ns = slot; |
| 17771 | } |
| 17772 | } |
| 17773 | else |
| 17774 | sec.set_overrun (); |
| 17775 | } |
| 17776 | else |
| 17777 | ns = global_namespace; |
| 17778 | |
| 17779 | return ns; |
| 17780 | } |
| 17781 | |
| 17782 | /* SPACES is a sorted vector of namespaces. Write out the namespaces |
| 17783 | to MOD_SNAME_PFX.nms section. */ |
| 17784 | |
| 17785 | void |
| 17786 | module_state::write_namespaces (elf_out *to, vec<depset *> spaces, |
| 17787 | unsigned num, unsigned *crc_p) |
| 17788 | { |
| 17789 | dump () && dump ("Writing namespaces" ); |
| 17790 | dump.indent (); |
| 17791 | |
| 17792 | bytes_out sec (to); |
| 17793 | sec.begin (); |
| 17794 | |
| 17795 | for (unsigned ix = 0; ix != num; ix++) |
| 17796 | { |
| 17797 | depset *b = spaces[ix]; |
| 17798 | tree ns = b->get_entity (); |
| 17799 | |
| 17800 | /* This could be an anonymous namespace even for a named module, |
| 17801 | since we can still emit no-linkage decls. */ |
| 17802 | gcc_checking_assert (TREE_CODE (ns) == NAMESPACE_DECL); |
| 17803 | |
| 17804 | unsigned flags = 0; |
| 17805 | if (TREE_PUBLIC (ns)) |
| 17806 | flags |= 1; |
| 17807 | if (DECL_NAMESPACE_INLINE_P (ns)) |
| 17808 | flags |= 2; |
| 17809 | if (DECL_MODULE_PURVIEW_P (ns)) |
| 17810 | flags |= 4; |
| 17811 | if (DECL_MODULE_EXPORT_P (ns)) |
| 17812 | flags |= 8; |
| 17813 | if (TREE_DEPRECATED (ns)) |
| 17814 | flags |= 16; |
| 17815 | |
| 17816 | dump () && dump ("Writing namespace:%u %N%s%s%s%s" , |
| 17817 | b->cluster, ns, |
| 17818 | flags & 1 ? ", public" : "" , |
| 17819 | flags & 2 ? ", inline" : "" , |
| 17820 | flags & 4 ? ", purview" : "" , |
| 17821 | flags & 8 ? ", export" : "" , |
| 17822 | flags & 16 ? ", deprecated" : "" ); |
| 17823 | sec.u (v: b->cluster); |
| 17824 | sec.u (v: to->name (DECL_NAME (ns))); |
| 17825 | write_namespace (sec, dep: b->deps[0]); |
| 17826 | |
| 17827 | sec.u (v: flags); |
| 17828 | write_location (sec, DECL_SOURCE_LOCATION (ns)); |
| 17829 | |
| 17830 | if (DECL_NAMESPACE_INLINE_P (ns)) |
| 17831 | { |
| 17832 | if (tree attr = lookup_attribute (attr_name: "abi_tag" , DECL_ATTRIBUTES (ns))) |
| 17833 | { |
| 17834 | tree tags = TREE_VALUE (attr); |
| 17835 | sec.u (v: list_length (tags)); |
| 17836 | for (tree tag = tags; tag; tag = TREE_CHAIN (tag)) |
| 17837 | sec.str (TREE_STRING_POINTER (TREE_VALUE (tag))); |
| 17838 | } |
| 17839 | else |
| 17840 | sec.u (v: 0); |
| 17841 | } |
| 17842 | } |
| 17843 | |
| 17844 | sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".nms" ), crc_ptr: crc_p); |
| 17845 | dump.outdent (); |
| 17846 | } |
| 17847 | |
| 17848 | /* Read the namespace hierarchy from MOD_SNAME_PFX.namespace. Fill in |
| 17849 | SPACES from that data. */ |
| 17850 | |
| 17851 | bool |
| 17852 | module_state::read_namespaces (unsigned num) |
| 17853 | { |
| 17854 | bytes_in sec; |
| 17855 | |
| 17856 | if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".nms" )) |
| 17857 | return false; |
| 17858 | |
| 17859 | dump () && dump ("Reading namespaces" ); |
| 17860 | dump.indent (); |
| 17861 | |
| 17862 | for (unsigned ix = 0; ix != num; ix++) |
| 17863 | { |
| 17864 | unsigned entity_index = sec.u (); |
| 17865 | unsigned name = sec.u (); |
| 17866 | |
| 17867 | tree parent = read_namespace (sec); |
| 17868 | |
| 17869 | /* See comment in write_namespace about why not bits. */ |
| 17870 | unsigned flags = sec.u (); |
| 17871 | location_t src_loc = read_location (sec); |
| 17872 | unsigned tags_count = (flags & 2) ? sec.u () : 0; |
| 17873 | |
| 17874 | if (entity_index >= entity_num |
| 17875 | || !parent |
| 17876 | || (flags & 0xc) == 0x8) |
| 17877 | sec.set_overrun (); |
| 17878 | |
| 17879 | tree tags = NULL_TREE; |
| 17880 | while (tags_count--) |
| 17881 | { |
| 17882 | size_t len; |
| 17883 | const char *str = sec.str (len_p: &len); |
| 17884 | tags = tree_cons (NULL_TREE, build_string (len + 1, str), tags); |
| 17885 | tags = nreverse (tags); |
| 17886 | } |
| 17887 | |
| 17888 | if (sec.get_overrun ()) |
| 17889 | break; |
| 17890 | |
| 17891 | tree id = name ? get_identifier (from ()->name (name)) : NULL_TREE; |
| 17892 | |
| 17893 | dump () && dump ("Read namespace:%u %P%s%s%s%s" , |
| 17894 | entity_index, parent, id, |
| 17895 | flags & 1 ? ", public" : "" , |
| 17896 | flags & 2 ? ", inline" : "" , |
| 17897 | flags & 4 ? ", purview" : "" , |
| 17898 | flags & 8 ? ", export" : "" , |
| 17899 | flags & 16 ? ", deprecated" : "" ); |
| 17900 | bool visible_p = ((flags & 8) |
| 17901 | || ((flags & 1) |
| 17902 | && (flags & 4) |
| 17903 | && (is_partition () || is_module ()))); |
| 17904 | tree inner = add_imported_namespace (ctx: parent, name: id, src_loc, module: mod, |
| 17905 | inline_p: bool (flags & 2), visible_p); |
| 17906 | if (!inner) |
| 17907 | { |
| 17908 | sec.set_overrun (); |
| 17909 | break; |
| 17910 | } |
| 17911 | |
| 17912 | if (is_partition ()) |
| 17913 | { |
| 17914 | if (flags & 4) |
| 17915 | DECL_MODULE_PURVIEW_P (inner) = true; |
| 17916 | if (flags & 8) |
| 17917 | DECL_MODULE_EXPORT_P (inner) = true; |
| 17918 | } |
| 17919 | |
| 17920 | if (flags & 16) |
| 17921 | TREE_DEPRECATED (inner) = true; |
| 17922 | |
| 17923 | if (tags) |
| 17924 | DECL_ATTRIBUTES (inner) |
| 17925 | = tree_cons (get_identifier ("abi_tag" ), tags, DECL_ATTRIBUTES (inner)); |
| 17926 | |
| 17927 | /* Install the namespace. */ |
| 17928 | (*entity_ary)[entity_lwm + entity_index] = inner; |
| 17929 | if (DECL_MODULE_IMPORT_P (inner)) |
| 17930 | { |
| 17931 | bool existed; |
| 17932 | unsigned *slot = &entity_map->get_or_insert |
| 17933 | (DECL_UID (inner), existed: &existed); |
| 17934 | if (existed) |
| 17935 | /* If it existed, it should match. */ |
| 17936 | gcc_checking_assert (inner == (*entity_ary)[*slot]); |
| 17937 | else |
| 17938 | *slot = entity_lwm + entity_index; |
| 17939 | } |
| 17940 | } |
| 17941 | |
| 17942 | dump.outdent (); |
| 17943 | if (!sec.end (src: from ())) |
| 17944 | return false; |
| 17945 | return true; |
| 17946 | } |
| 17947 | |
/* Write the module-purview using-directives of the global namespace
   and of every namespace in SPACES to TO's MOD_SNAME_PFX.udi section.
   TABLE maps decls back to the depsets already written for them.
   Returns the number of directives written (recorded separately so
   the reader knows how many to expect).  */

unsigned
module_state::write_using_directives (elf_out *to, depset::hash &table,
				      vec<depset *> spaces, unsigned *crc_p)
{
  dump () && dump ("Writing using-directives" );
  dump.indent ();

  bytes_out sec (to);
  sec.begin ();

  unsigned num = 0;
  /* Stream every streamable using-directive directly contained in the
     namespace PARENT_DEP represents.  */
  auto emit_one_ns = [&](depset *parent_dep)
    {
      tree parent = parent_dep->get_entity ();
      for (auto udir : NAMESPACE_LEVEL (parent)->using_directives)
	{
	  /* Only module-purview using-directives are of interest to
	     importers.  */
	  if (TREE_CODE (udir) != USING_DECL || !DECL_MODULE_PURVIEW_P (udir))
	    continue;
	  bool exported = DECL_MODULE_EXPORT_P (udir);
	  tree target = USING_DECL_DECLS (udir);
	  depset *target_dep = table.find_dependency (decl: target);

	  /* An using-directive imported from a different module might not
	     have been walked earlier (PR c++/122915).  But importers will
	     be able to just refer to the decl in that module unless it was
	     a partition anyway, so we don't have anything to do here.  */
	  if (!target_dep)
	    {
	      gcc_checking_assert (DECL_MODULE_IMPORT_P (udir));
	      continue;
	    }

	  dump () && dump ("Writing using-directive in %N for %N" ,
			   parent, target);
	  sec.u (v: exported);
	  write_namespace (sec, dep: parent_dep);
	  write_namespace (sec, dep: target_dep);
	  ++num;
	}
    };

  emit_one_ns (table.find_dependency (global_namespace));
  for (depset *parent_dep : spaces)
    emit_one_ns (parent_dep);

  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".udi" ), crc_ptr: crc_p);
  dump.outdent ();

  return num;
}
| 17998 | |
| 17999 | bool |
| 18000 | module_state::read_using_directives (unsigned num) |
| 18001 | { |
| 18002 | if (!bitmap_bit_p (this_module ()->imports, mod)) |
| 18003 | { |
| 18004 | dump () && dump ("Ignoring using-directives because module %M " |
| 18005 | "is not visible in this TU" , this); |
| 18006 | return true; |
| 18007 | } |
| 18008 | |
| 18009 | bytes_in sec; |
| 18010 | |
| 18011 | if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".udi" )) |
| 18012 | return false; |
| 18013 | |
| 18014 | dump () && dump ("Reading using-directives" ); |
| 18015 | dump.indent (); |
| 18016 | |
| 18017 | for (unsigned ix = 0; ix != num; ++ix) |
| 18018 | { |
| 18019 | bool exported = sec.u (); |
| 18020 | tree parent = read_namespace (sec); |
| 18021 | tree target = read_namespace (sec); |
| 18022 | if (sec.get_overrun ()) |
| 18023 | break; |
| 18024 | |
| 18025 | dump () && dump ("Read using-directive in %N for %N" , parent, target); |
| 18026 | if (exported || is_module () || is_partition ()) |
| 18027 | add_imported_using_namespace (parent, target); |
| 18028 | } |
| 18029 | |
| 18030 | dump.outdent (); |
| 18031 | if (!sec.end (src: from ())) |
| 18032 | return false; |
| 18033 | return true; |
| 18034 | } |
| 18035 | |
| 18036 | /* Write the binding TABLE to MOD_SNAME_PFX.bnd */ |
| 18037 | |
| 18038 | unsigned |
| 18039 | module_state::write_bindings (elf_out *to, vec<depset *> sccs, unsigned *crc_p) |
| 18040 | { |
| 18041 | dump () && dump ("Writing binding table" ); |
| 18042 | dump.indent (); |
| 18043 | |
| 18044 | unsigned num = 0; |
| 18045 | bytes_out sec (to); |
| 18046 | sec.begin (); |
| 18047 | |
| 18048 | for (unsigned ix = 0; ix != sccs.length (); ix++) |
| 18049 | { |
| 18050 | depset *b = sccs[ix]; |
| 18051 | if (b->is_binding ()) |
| 18052 | { |
| 18053 | tree ns = b->get_entity (); |
| 18054 | dump () && dump ("Bindings %P section:%u" , ns, b->get_name (), |
| 18055 | b->section); |
| 18056 | sec.u (v: to->name (ident: b->get_name ())); |
| 18057 | write_namespace (sec, dep: b->deps[0]); |
| 18058 | sec.u (v: b->section); |
| 18059 | num++; |
| 18060 | } |
| 18061 | } |
| 18062 | |
| 18063 | sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".bnd" ), crc_ptr: crc_p); |
| 18064 | dump.outdent (); |
| 18065 | |
| 18066 | return num; |
| 18067 | } |
| 18068 | |
| 18069 | /* Read the binding table from MOD_SNAME_PFX.bind. */ |
| 18070 | |
| 18071 | bool |
| 18072 | module_state::read_bindings (unsigned num, unsigned lwm, unsigned hwm) |
| 18073 | { |
| 18074 | bytes_in sec; |
| 18075 | |
| 18076 | if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".bnd" )) |
| 18077 | return false; |
| 18078 | |
| 18079 | dump () && dump ("Reading binding table" ); |
| 18080 | dump.indent (); |
| 18081 | for (; !sec.get_overrun () && num--;) |
| 18082 | { |
| 18083 | const char *name = from ()->name (offset: sec.u ()); |
| 18084 | tree ns = read_namespace (sec); |
| 18085 | unsigned snum = sec.u (); |
| 18086 | |
| 18087 | if (!ns || !name || (snum - lwm) >= (hwm - lwm)) |
| 18088 | sec.set_overrun (); |
| 18089 | if (!sec.get_overrun ()) |
| 18090 | { |
| 18091 | tree id = get_identifier (name); |
| 18092 | dump () && dump ("Bindings %P section:%u" , ns, id, snum); |
| 18093 | if (mod && !import_module_binding (ctx: ns, name: id, mod, snum)) |
| 18094 | break; |
| 18095 | } |
| 18096 | } |
| 18097 | |
| 18098 | dump.outdent (); |
| 18099 | if (!sec.end (src: from ())) |
| 18100 | return false; |
| 18101 | return true; |
| 18102 | } |
| 18103 | |
| 18104 | /* Write the entity table to MOD_SNAME_PFX.ent |
| 18105 | |
| 18106 | Each entry is a section number. */ |
| 18107 | |
void
module_state::write_entities (elf_out *to, vec<depset *> depsets,
			      unsigned count, unsigned *crc_p)
{
  dump () && dump ("Writing entities" );
  dump.indent ();

  bytes_out sec (to);
  sec.begin ();

  /* CURRENT is the flat entity index we expect next; the depsets we
     emit must have been assigned consecutive cluster numbers.  */
  unsigned current = 0;
  for (unsigned ix = 0; ix < depsets.length (); ix++)
    {
      depset *d = depsets[ix];

      switch (d->get_entity_kind ())
	{
	default:
	  /* Bindings, usings etc occupy no entity slot.  */
	  break;

	case depset::EK_NAMESPACE:
	  /* Namespaces we own are recreated on read rather than
	     streamed, so they get a zero section number.  */
	  if (!d->is_import () && d->get_entity () != global_namespace)
	    {
	      gcc_checking_assert (d->cluster == current);
	      current++;
	      sec.u (v: 0);
	    }
	  break;

	case depset::EK_DECL:
	case depset::EK_SPECIALIZATION:
	case depset::EK_PARTIAL:
	  /* A streamed entity: record the CMI section holding it.  */
	  gcc_checking_assert (!d->is_unreached ()
			       && !d->is_import ()
			       && d->cluster == current
			       && d->section);
	  current++;
	  sec.u (v: d->section);
	  break;
	}
    }
  /* We must have written exactly the number of entities promised.  */
  gcc_assert (count == current);
  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".ent" ), crc_ptr: crc_p);
  dump.outdent ();
}
| 18153 | |
/* Read the entity table, COUNT entries, each a section number that
   must lie in [LWM, HWM) (or zero for namespaces).  Populates our
   span of the global entity array with lazy slots.  */

bool
module_state::read_entities (unsigned count, unsigned lwm, unsigned hwm)
{
  trees_in sec (this);

  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".ent" ))
    return false;

  dump () && dump ("Reading entities" );
  dump.indent ();

  for (binding_slot *slot = entity_ary->begin () + entity_lwm; count--; slot++)
    {
      unsigned snum = sec.u ();
      /* Unsigned wraparound also catches snum < lwm here.  */
      if (snum && (snum - lwm) >= (hwm - lwm))
	sec.set_overrun ();
      if (sec.get_overrun ())
	break;

      if (snum)
	/* Mark the slot lazy, remembering the section to load from.
	   The shift presumably leaves the low bits free for flags in
	   the lazy encoding -- verify against binding_slot.  */
	slot->set_lazy (snum << 2);
    }

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;
  return true;
}
| 18182 | |
| 18183 | /* Write the pending table to MOD_SNAME_PFX.pnd |
| 18184 | |
| 18185 | The pending table holds information about clusters that need to be |
| 18186 | loaded because they contain information about something that is not |
| 18187 | found by namespace-scope lookup. |
| 18188 | |
| 18189 | The three cases are: |
| 18190 | |
| 18191 | (a) Template (maybe-partial) specializations that we have |
| 18192 | instantiated or defined. When an importer needs to instantiate |
| 18193 | that template, they /must have/ the partial, explicit & extern |
| 18194 | specializations available. If they have the other specializations |
| 18195 | available, they'll have less work to do. Thus, when we're about to |
| 18196 | instantiate FOO, we have to be able to ask 'are there any |
| 18197 | specialization of FOO in our imports?'. |
| 18198 | |
| 18199 | (b) (Maybe-implicit) member functions definitions. A class could |
| 18200 | be defined in one header, and an inline member defined in a |
| 18201 | different header (this occurs in the STL). Similarly, like the |
| 18202 | specialization case, an implicit member function could have been |
| 18203 | 'instantiated' in one module, and it'd be nice to not have to |
| 18204 | reinstantiate it in another. |
| 18205 | |
| 18206 | (c) Classes completed elsewhere. A class could be declared in one |
| 18207 | header and defined in another. We need to know to load the class |
| 18208 | definition before looking in it. It does highlight an issue -- |
| 18209 | there could be an intermediate import between the outermost containing |
| 18210 | namespace-scope class and the innermost being-defined class. This is |
| 18211 | actually possible with all of these cases, so be aware -- we're not |
| 18212 | just talking of one level of import to get to the innermost namespace. |
| 18213 | |
| 18214 | This gets complicated fast, it took me multiple attempts to even |
| 18215 | get something remotely working. Partially because I focussed on |
| 18216 | optimizing what I think turns out to be a smaller problem, given |
| 18217 | the known need to do the more general case *anyway*. I document |
| 18218 | the smaller problem, because it does appear to be the natural way |
   to do it.  It's a trap!
| 18220 | |
| 18221 | **** THE TRAP |
| 18222 | |
| 18223 | Let's refer to the primary template or the containing class as the |
| 18224 | KEY. And the specialization or member as the PENDING-ENTITY. (To |
| 18225 | avoid having to say those mouthfuls all the time.) |
| 18226 | |
| 18227 | In either case, we have an entity and we need some way of mapping |
| 18228 | that to a set of entities that need to be loaded before we can |
| 18229 | proceed with whatever processing of the entity we were going to do. |
| 18230 | |
| 18231 | We need to link the key to the pending-entity in some way. Given a |
| 18232 | key, tell me the pending-entities I need to have loaded. However |
| 18233 | we tie the key to the pending-entity must not rely on the key being |
| 18234 | loaded -- that'd defeat the lazy loading scheme. |
| 18235 | |
   As the key will be an import, we know its entity number (either
   because we imported it, or we're writing it out too).  Thus we can
| 18238 | generate a map of key-indices to pending-entities. The |
| 18239 | pending-entity indices will be into our span of the entity table, |
| 18240 | and thus allow them to be lazily loaded. The key index will be |
| 18241 | into another slot of the entity table. Notice that this checking |
| 18242 | could be expensive, we don't want to iterate over a bunch of |
| 18243 | pending-entity indices (across multiple imports), every time we're |
   about to do the thing with the key.  We need to quickly determine
| 18245 | 'definitely nothing needed'. |
| 18246 | |
| 18247 | That's almost good enough, except that key indices are not unique |
| 18248 | in a couple of cases :( Specifically the Global Module or a module |
| 18249 | partition can result in multiple modules assigning an entity index |
| 18250 | for the key. The decl-merging on loading will detect that so we |
| 18251 | only have one Key loaded, and in the entity hash it'll indicate the |
| 18252 | entity index of first load. Which might be different to how we |
| 18253 | know it. Notice this is restricted to GM entities or this-module |
| 18254 | entities. Foreign imports cannot have this. |
| 18255 | |
| 18256 | We can simply resolve this in the direction of how this module |
| 18257 | referred to the key to how the importer knows it. Look in the |
| 18258 | entity table slot that we nominate, maybe lazy load it, and then |
| 18259 | lookup the resultant entity in the entity hash to learn how the |
| 18260 | importer knows it. |
| 18261 | |
| 18262 | But we need to go in the other direction :( Given the key, find all |
| 18263 | the index-aliases of that key. We can partially solve that by |
| 18264 | adding an alias hash table. Whenever we load a merged decl, add or |
| 18265 | augment a mapping from the entity (or its entity-index) to the |
| 18266 | newly-discovered index. Then when we look for pending entities of |
   a key, we also iterate over the aliases this mapping provides.
| 18268 | |
| 18269 | But that requires the alias to be loaded. And that's not |
| 18270 | necessarily true. |
| 18271 | |
| 18272 | *** THE SIMPLER WAY |
| 18273 | |
| 18274 | The remaining fixed thing we have is the innermost namespace |
| 18275 | containing the ultimate namespace-scope container of the key and |
| 18276 | the name of that container (which might be the key itself). I.e. a |
| 18277 | namespace-decl/identifier/module tuple. Let's call this the |
| 18278 | top-key. We'll discover that the module is not important here, |
| 18279 | because of cross-module possibilities mentioned in case #c above. |
| 18280 | We can't markup namespace-binding slots. The best we can do is |
| 18281 | mark the binding vector with 'there's something here', and have |
| 18282 | another map from namespace/identifier pairs to a vector of pending |
| 18283 | entity indices. |
| 18284 | |
| 18285 | Maintain a pending-entity map. This is keyed by top-key, and |
| 18286 | maps to a vector of pending-entity indices. On the binding vector |
| 18287 | have flags saying whether the pending-name-entity map has contents. |
| 18288 | (We might want to further extend the key to be GM-vs-Partition and |
| 18289 | specialization-vs-member, but let's not get ahead of ourselves.) |
| 18290 | |
| 18291 | For every key-like entity, find the outermost namespace-scope |
| 18292 | name. Use that to lookup in the pending-entity map and then make |
| 18293 | sure the specified entities are loaded. |
| 18294 | |
| 18295 | An optimization might be to have a flag in each key-entity saying |
| 18296 | that its top key might be in the entity table. It's not clear to |
| 18297 | me how to set that flag cheaply -- cheaper than just looking. |
| 18298 | |
| 18299 | FIXME: It'd be nice to have a bit in decls to tell us whether to |
| 18300 | even try this. We can have a 'already done' flag, that we set when |
| 18301 | we've done KLASS's lazy pendings. When we import a module that |
| 18302 | registers pendings on the same top-key as KLASS we need to clear |
| 18303 | the flag. A recursive walk of the top-key clearing the bit will |
| 18304 | suffice. Plus we only need to recurse on classes that have the bit |
| 18305 | set. (That means we need to set the bit on parents of KLASS here, |
| 18306 | don't forget.) However, first: correctness, second: efficiency. */ |
| 18307 | |
unsigned
module_state::write_pendings (elf_out *to, vec<depset *> depsets,
			      depset::hash &table, unsigned *crc_p)
{
  dump () && dump ("Writing pending-entities" );
  dump.indent ();

  trees_out sec (to, this, table);
  sec.begin ();

  /* COUNT is the number of (namespace, name, cluster) records
     emitted; consecutive pendings sharing the same key and section
     are coalesced into one record via this one-entry cache.  */
  unsigned count = 0;
  tree cache_ns = NULL_TREE;
  tree cache_id = NULL_TREE;
  unsigned cache_section = ~0;
  for (unsigned ix = 0; ix < depsets.length (); ix++)
    {
      depset *d = depsets[ix];

      /* Only our own pending entities are of interest.  */
      if (d->is_binding ())
	continue;

      if (d->is_import ())
	continue;

      if (!d->is_pending_entity ())
	continue;

      /* Compute the top-key: innermost namespace and the name of the
	 outermost namespace-scope container (see block comment
	 above).  */
      tree key_decl = nullptr;
      tree key_ns = find_pending_key (decl: d->get_entity (), decl_p: &key_decl);
      tree key_name = DECL_NAME (key_decl);

      if (IDENTIFIER_ANON_P (key_name))
	{
	  /* Anonymous keys can only be lambdas; key them to the decl
	     the lambda is attached to, if any.  */
	  gcc_checking_assert (IDENTIFIER_LAMBDA_P (key_name));
	  if (tree attached = LAMBDA_TYPE_EXTRA_SCOPE (TREE_TYPE (key_decl)))
	    key_name = DECL_NAME (attached);
	  else
	    {
	      /* There's nothing to attach it to.  Must
		 always reinstantiate.  */
	      dump ()
		&& dump ("Unattached lambda %N[%u] section:%u" ,
			 d->get_entity_kind () == depset::EK_DECL
			 ? "Member" : "Specialization" , d->get_entity (),
			 d->cluster, d->section);
	      continue;
	    }
	}

      char const *also = "" ;
      if (d->section == cache_section
	  && key_ns == cache_ns
	  && key_name == cache_id)
	/* Same section & key as previous, no need to repeat ourselves.  */
	also = "also " ;
      else
	{
	  /* New record: stream the key namespace & name, and the
	     pending entity's index (cluster number).  */
	  cache_ns = key_ns;
	  cache_id = key_name;
	  cache_section = d->section;
	  gcc_checking_assert (table.find_dependency (cache_ns));
	  sec.tree_node (t: cache_ns);
	  sec.tree_node (t: cache_id);
	  sec.u (v: d->cluster);
	  count++;
	}
      dump () && dump ("Pending %s %N entity:%u section:%u %skeyed to %P" ,
		       d->get_entity_kind () == depset::EK_DECL
		       ? "member" : "specialization" , d->get_entity (),
		       d->cluster, cache_section, also, cache_ns, cache_id);
    }
  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".pnd" ), crc_ptr: crc_p);
  dump.outdent ();

  return count;
}
| 18384 | |
/* Read the pending table, COUNT entries of (namespace, identifier,
   entity-index), merging them into the global pending_table.  */

bool
module_state::read_pendings (unsigned count)
{
  trees_in sec (this);

  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".pnd" ))
    return false;

  dump () && dump ("Reading %u pendings" , count);
  dump.indent ();

  for (unsigned ix = 0; ix != count; ix++)
    {
      pending_key key;
      unsigned index;

      key.ns = sec.tree_node ();
      key.id = sec.tree_node ();
      index = sec.u ();

      /* Sanity check the streamed data: a non-alias namespace, an
	 identifier, and an index within our entity span.  */
      if (!key.ns || !key.id
	  || !(TREE_CODE (key.ns) == NAMESPACE_DECL
	       && !DECL_NAMESPACE_ALIAS (key.ns))
	  || !identifier_p (t: key.id)
	  || index >= entity_num)
	sec.set_overrun ();

      if (sec.get_overrun ())
	break;

      dump () && dump ("Pending:%u keyed to %P" , index, key.ns, key.id);

      /* Rebase the module-local index into the global entity array
	 and record it under the top-key.  */
      index += entity_lwm;
      auto &vec = pending_table->get_or_insert (k: key);
      vec.safe_push (obj: index);
    }

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;
  return true;
}
| 18427 | |
| 18428 | /* Read & write locations. */ |
enum loc_kind {
  LK_ORDINARY,		/* Ordinary location within this module.  */
  LK_MACRO,		/* Macro-expansion location within this module.  */
  LK_IMPORT_ORDINARY,	/* Ordinary location from an import (module
			   remap number follows).  */
  LK_IMPORT_MACRO,	/* Macro location from an import.  */
  LK_ADHOC,		/* Adhoc location: locus, range & discriminator
			   streamed separately.  */
  LK_RESERVED,		/* Values from here up to LK_RESERVED +
			   RESERVED_LOCATION_COUNT encode the reserved
			   locations directly.  */
};
| 18437 | |
| 18438 | static const module_state * |
| 18439 | module_for_ordinary_loc (location_t loc) |
| 18440 | { |
| 18441 | unsigned pos = 0; |
| 18442 | unsigned len = ool->length () - pos; |
| 18443 | |
| 18444 | while (len) |
| 18445 | { |
| 18446 | unsigned half = len / 2; |
| 18447 | module_state *probe = (*ool)[pos + half]; |
| 18448 | if (loc < probe->ordinary_locs.first) |
| 18449 | len = half; |
| 18450 | else if (loc < probe->ordinary_locs.first + probe->ordinary_locs.second) |
| 18451 | return probe; |
| 18452 | else |
| 18453 | { |
| 18454 | pos += half + 1; |
| 18455 | len = len - (half + 1); |
| 18456 | } |
| 18457 | } |
| 18458 | |
| 18459 | return nullptr; |
| 18460 | } |
| 18461 | |
| 18462 | static const module_state * |
| 18463 | module_for_macro_loc (location_t loc) |
| 18464 | { |
| 18465 | unsigned pos = 1; |
| 18466 | unsigned len = modules->length () - pos; |
| 18467 | |
| 18468 | while (len) |
| 18469 | { |
| 18470 | unsigned half = len / 2; |
| 18471 | module_state *probe = (*modules)[pos + half]; |
| 18472 | if (loc < probe->macro_locs.first) |
| 18473 | { |
| 18474 | pos += half + 1; |
| 18475 | len = len - (half + 1); |
| 18476 | } |
| 18477 | else if (loc >= probe->macro_locs.first + probe->macro_locs.second) |
| 18478 | len = half; |
| 18479 | else |
| 18480 | return probe; |
| 18481 | } |
| 18482 | |
| 18483 | return NULL; |
| 18484 | } |
| 18485 | |
| 18486 | location_t |
| 18487 | module_state::imported_from () const |
| 18488 | { |
| 18489 | location_t from = loc; |
| 18490 | line_map_ordinary const *fmap |
| 18491 | = linemap_check_ordinary (map: linemap_lookup (line_table, from)); |
| 18492 | |
| 18493 | if (MAP_MODULE_P (map: fmap)) |
| 18494 | from = linemap_included_from (ord_map: fmap); |
| 18495 | |
| 18496 | return from; |
| 18497 | } |
| 18498 | |
| 18499 | /* Note that LOC will need writing. This allows us to prune locations |
| 18500 | that are not needed. */ |
| 18501 | |
/* Record that LOC will need streaming out, so that unneeded linemap
   spans can be pruned.  Returns true if this call added a new entry
   to one of the location tables (used by write_prepare_maps to
   detect when the tables changed).  */

bool
module_state::note_location (location_t loc)
{
  bool added = false;
  if (!macro_loc_table && !ord_loc_table)
    /* Tables not allocated: we are not collecting locations.  */
    ;
  else if (loc < RESERVED_LOCATION_COUNT)
    /* Reserved locations are streamed by value, nothing to note.  */
    ;
  else if (IS_ADHOC_LOC (loc))
    {
      /* An adhoc location wraps a locus plus a range; note each
	 component (skipping the range start when it equals the
	 locus).  */
      location_t locus = get_location_from_adhoc_loc (line_table, loc);
      note_location (loc: locus);
      source_range range = get_range_from_loc (set: line_table, loc);
      if (range.m_start != locus)
	note_location (loc: range.m_start);
      note_location (loc: range.m_finish);
    }
  else if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table))
    {
      /* A macro-expansion location; only interesting if it falls in
	 a span we own.  */
      if (spans.macro (loc))
	{
	  const line_map *map = linemap_lookup (line_table, loc);
	  const line_map_macro *mac_map = linemap_check_macro (map);
	  hashval_t hv = macro_loc_traits::hash (p: mac_map);
	  macro_loc_info *slot
	    = macro_loc_table->find_slot_with_hash (comparable: mac_map, hash: hv, insert: INSERT);
	  if (!slot->src)
	    {
	      /* First time we see this macro map: record it and
		 recursively note the locations it refers to.  */
	      slot->src = mac_map;
	      slot->remap = 0;
	      // Expansion locations could themselves be from a
	      // macro, we need to note them all.
	      note_location (loc: mac_map->m_expansion);
	      gcc_checking_assert (mac_map->n_tokens);
	      /* Note every distinct token location; TLOC skips
		 immediate repeats, which are common.  */
	      location_t tloc = UNKNOWN_LOCATION;
	      for (unsigned ix = mac_map->n_tokens * 2; ix--;)
		if (mac_map->macro_locations[ix] != tloc)
		  {
		    tloc = mac_map->macro_locations[ix];
		    note_location (loc: tloc);
		  }
	      added = true;
	    }
	}
    }
  else if (IS_ORDINARY_LOC (loc))
    {
      /* An ordinary location; only interesting if it falls in a span
	 we own.  */
      if (spans.ordinary (loc))
	{
	  const line_map *map = linemap_lookup (line_table, loc);
	  const line_map_ordinary *ord_map = linemap_check_ordinary (map);
	  /* Record a whole line's worth of locations (SPAN wide,
	     aligned by masking off the low bits of the offset).  */
	  ord_loc_info lkup;
	  lkup.src = ord_map;
	  lkup.span = loc_one << ord_map->m_column_and_range_bits;
	  lkup.offset = (loc - MAP_START_LOCATION (map: ord_map)) & ~(lkup.span - 1);
	  lkup.remap = 0;
	  ord_loc_info *slot = (ord_loc_table->find_slot_with_hash
				(comparable: lkup, hash: ord_loc_traits::hash (v: lkup), insert: INSERT));
	  if (!slot->src)
	    {
	      *slot = lkup;
	      added = true;
	    }
	}
    }
  else
    gcc_unreachable ();
  return added;
}
| 18571 | |
| 18572 | /* If we're not streaming, record that we need location LOC. |
| 18573 | Otherwise stream it. */ |
| 18574 | |
void
module_state::write_location (bytes_out &sec, location_t loc)
{
  if (!sec.streaming_p ())
    {
      /* First (dry-run) pass: just note the location as needed.  */
      note_location (loc);
      return;
    }

  if (loc < RESERVED_LOCATION_COUNT)
    {
      /* Reserved locations stream by value, biased by LK_RESERVED.  */
      dump (dumper::LOCATION) && dump ("Reserved location %K" , loc);
      sec.loc (l: LK_RESERVED + loc);
    }
  else if (IS_ADHOC_LOC (loc))
    {
      /* Stream the components: locus, range start & finish,
	 discriminator.  Read back by read_location's LK_ADHOC.  */
      dump (dumper::LOCATION) && dump ("Adhoc location" );
      sec.u (v: LK_ADHOC);
      location_t locus = get_location_from_adhoc_loc (line_table, loc);
      write_location (sec, loc: locus);
      source_range range = get_range_from_loc (set: line_table, loc);
      if (range.m_start == locus)
	/* Compress. */
	range.m_start = UNKNOWN_LOCATION;
      write_location (sec, loc: range.m_start);
      write_location (sec, loc: range.m_finish);
      unsigned discriminator = get_discriminator_from_adhoc_loc (line_table, loc);
      sec.u (v: discriminator);
    }
  else if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table))
    {
      /* Binary search the sorted macro remap array for the map
	 containing LOC.  INFO ends up null if LOC is not in any of
	 our noted maps (i.e. it belongs to an import).  */
      const macro_loc_info *info = nullptr;
      line_map_uint_t offset = 0;
      if (unsigned hwm = macro_loc_remap->length ())
	{
	  info = macro_loc_remap->begin ();
	  while (hwm != 1)
	    {
	      unsigned mid = hwm / 2;
	      if (MAP_START_LOCATION (map: info[mid].src) <= loc)
		{
		  info += mid;
		  hwm -= mid;
		}
	      else
		hwm = mid;
	    }
	  offset = loc - MAP_START_LOCATION (map: info->src);
	  if (offset > info->src->n_tokens)
	    info = nullptr;
	}

      /* We found a map exactly when LOC is within our own spans.  */
      gcc_checking_assert (bool (info) == bool (spans.macro (loc)));

      if (info)
	{
	  /* One of our own macro locations: emit the remapped
	     offset.  */
	  offset += info->remap;
	  sec.u (v: LK_MACRO);
	  sec.loc (l: offset);
	  dump (dumper::LOCATION)
	    && dump ("Macro location %K output %K" , loc, offset);
	}
      else if (const module_state *import = module_for_macro_loc (loc))
	{
	  /* An imported macro location: emit the module's remap
	     number and the offset into that module's span.  */
	  auto off = loc - import->macro_locs.first;
	  sec.u (v: LK_IMPORT_MACRO);
	  sec.u (v: import->remap);
	  sec.loc (l: off);
	  dump (dumper::LOCATION)
	    && dump ("Imported macro location %K output %u:%K" ,
		     loc, import->remap, off);
	}
      else
	gcc_unreachable ();
    }
  else if (IS_ORDINARY_LOC (loc))
    {
      /* If we ran out of locations for imported decls, this location could
	 be a module unit's location.  In that case, remap the location
	 to be where we imported the module from.  */
      if (spans.locations_exhausted_p () || CHECKING_P)
	{
	  const line_map_ordinary *map
	    = linemap_check_ordinary (map: linemap_lookup (line_table, loc));
	  if (MAP_MODULE_P (map) && loc == MAP_START_LOCATION (map))
	    {
	      gcc_checking_assert (spans.locations_exhausted_p ());
	      write_location (sec, loc: linemap_included_from (ord_map: map));
	      return;
	    }
	}

      /* Binary search the sorted ordinary remap array, analogous to
	 the macro case above but keyed on map start + line offset.  */
      const ord_loc_info *info = nullptr;
      line_map_uint_t offset = 0;
      if (line_map_uint_t hwm = ord_loc_remap->length ())
	{
	  info = ord_loc_remap->begin ();
	  while (hwm != 1)
	    {
	      auto mid = hwm / 2;
	      if (MAP_START_LOCATION (map: info[mid].src) + info[mid].offset <= loc)
		{
		  info += mid;
		  hwm -= mid;
		}
	      else
		hwm = mid;
	    }
	  offset = loc - MAP_START_LOCATION (map: info->src) - info->offset;
	  if (offset > info->span)
	    info = nullptr;
	}

      gcc_checking_assert (bool (info) == bool (spans.ordinary (loc)));

      if (info)
	{
	  /* One of our own ordinary locations: emit the remapped
	     offset.  */
	  offset += info->remap;
	  sec.u (v: LK_ORDINARY);
	  sec.loc (l: offset);

	  dump (dumper::LOCATION)
	    && dump ("Ordinary location %K output %K" , loc, offset);
	}
      else if (const module_state *import = module_for_ordinary_loc (loc))
	{
	  /* An imported ordinary location: module remap number plus
	     offset into that module's span.  */
	  auto off = loc - import->ordinary_locs.first;
	  sec.u (v: LK_IMPORT_ORDINARY);
	  sec.u (v: import->remap);
	  sec.loc (l: off);
	  dump (dumper::LOCATION)
	    && dump ("Imported ordinary location %K output %u:%K" ,
		     loc, import->remap, off);
	}
      else
	gcc_unreachable ();
    }
  else
    gcc_unreachable ();
}
| 18715 | |
/* Read a location streamed by write_location, translating it into
   this compilation's location space.  On failure sets overrun and/or
   returns this module's own location as a fallback.  */

location_t
module_state::read_location (bytes_in &sec) const
{
  location_t locus = UNKNOWN_LOCATION;
  unsigned kind = sec.u ();
  switch (kind)
    {
    default:
      {
	/* Values at or above LK_RESERVED directly encode one of the
	   reserved locations.  */
	if (kind < LK_RESERVED + RESERVED_LOCATION_COUNT)
	  locus = location_t (kind - LK_RESERVED);
	else
	  sec.set_overrun ();
	dump (dumper::LOCATION)
	  && dump ("Reserved location %K" , locus);
      }
      break;

    case LK_ADHOC:
      {
	/* Recompose from locus, range & discriminator, mirroring the
	   order written by write_location.  */
	dump (dumper::LOCATION) && dump ("Adhoc location" );
	locus = read_location (sec);
	source_range range;
	range.m_start = read_location (sec);
	if (range.m_start == UNKNOWN_LOCATION)
	  /* Undo the compression of start == locus.  */
	  range.m_start = locus;
	range.m_finish = read_location (sec);
	unsigned discriminator = sec.u ();
	/* Only build the combined location if no component fell back
	   to this module's own location (the failure sentinel).  */
	if (locus != loc && range.m_start != loc && range.m_finish != loc)
	  locus = line_table->get_or_create_combined_loc (locus, src_range: range,
							  data: nullptr, discriminator);
      }
      break;

    case LK_MACRO:
      {
	/* An offset into this module's macro location span.  */
	auto off = sec.loc ();

	if (macro_locs.second)
	  {
	    if (off < macro_locs.second)
	      locus = off + macro_locs.first;
	    else
	      sec.set_overrun ();
	  }
	else
	  /* We have no macro locations; fall back to our own
	     location.  */
	  locus = loc;
	dump (dumper::LOCATION)
	  && dump ("Macro %K becoming %K" , off, locus);
      }
      break;

    case LK_ORDINARY:
      {
	/* An offset into this module's ordinary location span.  */
	auto off = sec.loc ();
	if (ordinary_locs.second)
	  {
	    if (off < ordinary_locs.second)
	      locus = off + ordinary_locs.first;
	    else
	      sec.set_overrun ();
	  }
	else
	  locus = loc;

	dump (dumper::LOCATION)
	  && dump ("Ordinary location %K becoming %K" , off, locus);
      }
      break;

    case LK_IMPORT_MACRO:
    case LK_IMPORT_ORDINARY:
      {
	/* A location within one of our imports: module remap number
	   then offset into that module's span.  */
	unsigned mod = sec.u ();
	location_t off = sec.loc ();
	const module_state *import = NULL;

	if (!mod && !slurp->remap)
	  /* This is an early read of a partition location during the
	     read of our ordinary location map.  */
	  import = this;
	else
	  {
	    mod = slurp->remap_module (owner: mod);
	    if (!mod)
	      sec.set_overrun ();
	    else
	      import = (*modules)[mod];
	  }

	if (import)
	  {
	    if (kind == LK_IMPORT_MACRO)
	      {
		/* Rebase into the import's macro span, falling back
		   to the import's own location if it has none.  */
		if (!import->macro_locs.second)
		  locus = import->loc;
		else if (off < import->macro_locs.second)
		  locus = off + import->macro_locs.first;
		else
		  sec.set_overrun ();
	      }
	    else
	      {
		/* Likewise for the import's ordinary span.  */
		if (!import->ordinary_locs.second)
		  locus = import->loc;
		else if (off < import->ordinary_locs.second)
		  locus = import->ordinary_locs.first + off;
		else
		  sec.set_overrun ();
	      }
	  }
      }
      break;
    }

  return locus;
}
| 18833 | |
| 18834 | /* Allocate hash tables to record needed locations. */ |
| 18835 | |
| 18836 | void |
| 18837 | module_state::write_init_maps () |
| 18838 | { |
| 18839 | macro_loc_table = new hash_table<macro_loc_traits> (EXPERIMENT (1, 400)); |
| 18840 | ord_loc_table = new hash_table<ord_loc_traits> (EXPERIMENT (1, 400)); |
| 18841 | } |
| 18842 | |
| 18843 | /* Prepare the span adjustments. We prune unneeded locations -- at |
| 18844 | this point every needed location must have been seen by |
| 18845 | note_location. */ |
| 18846 | |
| 18847 | range_t |
| 18848 | module_state::write_prepare_maps (module_state_config *cfg, bool has_partitions) |
| 18849 | { |
| 18850 | dump () && dump ("Preparing locations" ); |
| 18851 | dump.indent (); |
| 18852 | |
| 18853 | dump () && dump ("Reserved locations [%K,%K) macro [%K,%K)" , |
| 18854 | spans[loc_spans::SPAN_RESERVED].ordinary.first, |
| 18855 | spans[loc_spans::SPAN_RESERVED].ordinary.second, |
| 18856 | spans[loc_spans::SPAN_RESERVED].macro.first, |
| 18857 | spans[loc_spans::SPAN_RESERVED].macro.second); |
| 18858 | |
| 18859 | range_t info {0, 0}; |
| 18860 | |
| 18861 | // Sort the noted lines. |
| 18862 | vec_alloc (v&: ord_loc_remap, nelems: ord_loc_table->size ()); |
| 18863 | for (auto iter = ord_loc_table->begin (), end = ord_loc_table->end (); |
| 18864 | iter != end; ++iter) |
| 18865 | ord_loc_remap->quick_push (obj: *iter); |
| 18866 | ord_loc_remap->qsort (&ord_loc_info::compare); |
| 18867 | |
| 18868 | // Note included-from maps. |
| 18869 | bool added = false; |
| 18870 | const line_map_ordinary *current = nullptr; |
| 18871 | for (auto iter = ord_loc_remap->begin (), end = ord_loc_remap->end (); |
| 18872 | iter != end; ++iter) |
| 18873 | if (iter->src != current) |
| 18874 | { |
| 18875 | current = iter->src; |
| 18876 | for (auto probe = current; |
| 18877 | auto from = linemap_included_from (ord_map: probe); |
| 18878 | probe = linemap_check_ordinary (map: linemap_lookup (line_table, from))) |
| 18879 | { |
| 18880 | if (has_partitions) |
| 18881 | { |
| 18882 | // Partition locations need to elide their module map |
| 18883 | // entry. |
| 18884 | probe |
| 18885 | = linemap_check_ordinary (map: linemap_lookup (line_table, from)); |
| 18886 | if (MAP_MODULE_P (map: probe)) |
| 18887 | from = linemap_included_from (ord_map: probe); |
| 18888 | } |
| 18889 | |
| 18890 | if (!note_location (loc: from)) |
| 18891 | break; |
| 18892 | added = true; |
| 18893 | } |
| 18894 | } |
| 18895 | if (added) |
| 18896 | { |
| 18897 | // Reconstruct the line array as we added items to the hash table. |
| 18898 | vec_free (v&: ord_loc_remap); |
| 18899 | vec_alloc (v&: ord_loc_remap, nelems: ord_loc_table->size ()); |
| 18900 | for (auto iter = ord_loc_table->begin (), end = ord_loc_table->end (); |
| 18901 | iter != end; ++iter) |
| 18902 | ord_loc_remap->quick_push (obj: *iter); |
| 18903 | ord_loc_remap->qsort (&ord_loc_info::compare); |
| 18904 | } |
| 18905 | delete ord_loc_table; |
| 18906 | ord_loc_table = nullptr; |
| 18907 | |
| 18908 | // Merge (sufficiently) adjacent spans, and calculate remapping. |
| 18909 | constexpr line_map_uint_t adjacency = 2; // Allow 2 missing lines. |
| 18910 | auto begin = ord_loc_remap->begin (), end = ord_loc_remap->end (); |
| 18911 | auto dst = begin; |
| 18912 | line_map_uint_t offset = 0; |
| 18913 | unsigned range_bits = 0; |
| 18914 | ord_loc_info *base = nullptr; |
| 18915 | for (auto iter = begin; iter != end; ++iter) |
| 18916 | { |
| 18917 | if (base && iter->src == base->src) |
| 18918 | { |
| 18919 | if (base->offset + base->span + |
| 18920 | ((adjacency << base->src->m_column_and_range_bits) |
| 18921 | // If there are few c&r bits, allow further separation. |
| 18922 | | (adjacency << 4)) |
| 18923 | >= iter->offset) |
| 18924 | { |
| 18925 | // Merge. |
| 18926 | offset -= base->span; |
| 18927 | base->span = iter->offset + iter->span - base->offset; |
| 18928 | offset += base->span; |
| 18929 | continue; |
| 18930 | } |
| 18931 | } |
| 18932 | else if (range_bits < iter->src->m_range_bits) |
| 18933 | range_bits = iter->src->m_range_bits; |
| 18934 | |
| 18935 | offset += ((loc_one << iter->src->m_range_bits) - 1); |
| 18936 | offset &= ~((loc_one << iter->src->m_range_bits) - 1); |
| 18937 | iter->remap = offset; |
| 18938 | offset += iter->span; |
| 18939 | base = dst; |
| 18940 | *dst++ = *iter; |
| 18941 | } |
| 18942 | ord_loc_remap->truncate (size: dst - begin); |
| 18943 | |
| 18944 | info.first = ord_loc_remap->length (); |
| 18945 | cfg->ordinary_locs = offset; |
| 18946 | cfg->loc_range_bits = range_bits; |
| 18947 | dump () && dump ("Ordinary maps:%K locs:%K range_bits:%u" , |
| 18948 | info.first, |
| 18949 | cfg->ordinary_locs, |
| 18950 | cfg->loc_range_bits); |
| 18951 | |
| 18952 | // Remap the macro locations. |
| 18953 | vec_alloc (v&: macro_loc_remap, nelems: macro_loc_table->size ()); |
| 18954 | for (auto iter = macro_loc_table->begin (), end = macro_loc_table->end (); |
| 18955 | iter != end; ++iter) |
| 18956 | macro_loc_remap->quick_push (obj: *iter); |
| 18957 | delete macro_loc_table; |
| 18958 | macro_loc_table = nullptr; |
| 18959 | |
| 18960 | macro_loc_remap->qsort (¯o_loc_info::compare); |
| 18961 | offset = 0; |
| 18962 | for (auto iter = macro_loc_remap->begin (), end = macro_loc_remap->end (); |
| 18963 | iter != end; ++iter) |
| 18964 | { |
| 18965 | auto mac = iter->src; |
| 18966 | iter->remap = offset; |
| 18967 | offset += mac->n_tokens; |
| 18968 | } |
| 18969 | info.second = macro_loc_remap->length (); |
| 18970 | cfg->macro_locs = offset; |
| 18971 | |
| 18972 | dump () && dump ("Macro maps:%K locs:%K" , info.second, cfg->macro_locs); |
| 18973 | |
| 18974 | dump.outdent (); |
| 18975 | |
| 18976 | // If we have no ordinary locs, we must also have no macro locs. |
| 18977 | gcc_checking_assert (cfg->ordinary_locs || !cfg->macro_locs); |
| 18978 | |
| 18979 | return info; |
| 18980 | } |
| 18981 | |
| 18982 | bool |
| 18983 | module_state::read_prepare_maps (const module_state_config *cfg) |
| 18984 | { |
| 18985 | location_t ordinary = line_table->highest_location + 1; |
| 18986 | ordinary += cfg->ordinary_locs; |
| 18987 | |
| 18988 | location_t macro = LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table); |
| 18989 | macro -= cfg->macro_locs; |
| 18990 | |
| 18991 | if (ordinary < LINE_MAP_MAX_LOCATION_WITH_COLS |
| 18992 | && macro >= LINE_MAP_MAX_LOCATION) |
| 18993 | /* OK, we have enough locations. */ |
| 18994 | return true; |
| 18995 | |
| 18996 | ordinary_locs.first = ordinary_locs.second = 0; |
| 18997 | macro_locs.first = macro_locs.second = 0; |
| 18998 | |
| 18999 | spans.report_location_exhaustion (loc); |
| 19000 | |
| 19001 | return false; |
| 19002 | } |
| 19003 | |
| 19004 | /* Write & read the location maps. Not called if there are no |
| 19005 | locations. */ |
| 19006 | |
void
module_state::write_ordinary_maps (elf_out *to, range_t &info,
				   bool has_partitions, unsigned *crc_p)
{
  dump () && dump ("Writing ordinary location maps" );
  dump.indent ();

  vec<const char *> filenames;
  filenames.create (nelems: 20);

  /* Determine the unique filenames.  */
  const line_map_ordinary *current = nullptr;
  for (auto iter = ord_loc_remap->begin (), end = ord_loc_remap->end ();
       iter != end; ++iter)
    if (iter->src != current)
      {
	current = iter->src;
	const char *fname = ORDINARY_MAP_FILE_NAME (ord_map: iter->src);

	/* We should never find a module linemap in an interval.  */
	gcc_checking_assert (!MAP_MODULE_P (iter->src));

	/* We expect very few filenames, so just an array.
	   (Not true when headers are still in play :()  */
	for (unsigned jx = filenames.length (); jx--;)
	  {
	    const char *name = filenames[jx];
	    if (0 == strcmp (s1: name, s2: fname))
	      {
		/* Reset the linemap's name, because for things like
		   preprocessed input we could have multiple instances
		   of the same name, and we'd rather not percolate
		   that.  */
		const_cast<line_map_ordinary *> (iter->src)->to_file = name;
		fname = NULL;
		break;
	      }
	  }
	if (fname)
	  filenames.safe_push (obj: fname);
      }

  bytes_out sec (to);
  sec.begin ();

  /* Write the filenames.  */
  unsigned len = filenames.length ();
  sec.u (v: len);
  dump () && dump ("%u source file names" , len);
  for (unsigned ix = 0; ix != len; ix++)
    {
      const char *fname = filenames[ix];
      dump (dumper::LOCATION) && dump ("Source file[%u]=%s" , ix, fname);
      sec.str (ptr: fname);
    }

  sec.loc (l: info.first);	/* Num maps.  */
  /* BASE is the first remap entry of a run sharing one source map;
     later entries of the run are streamed as offset deltas from it.
     The reader recognizes a zero delta as the start of a new run and
     reads the full map description only then.  */
  const ord_loc_info *base = nullptr;
  for (auto iter = ord_loc_remap->begin (), end = ord_loc_remap->end ();
       iter != end; ++iter)
    {
      dump (dumper::LOCATION)
	&& dump ("Span:%K ordinary [%K+%K,+%K)->[%K,+%K)" ,
		 (location_t) (iter - ord_loc_remap->begin ()),
		 MAP_START_LOCATION (map: iter->src),
		 iter->offset, iter->span, iter->remap,
		 iter->span);

      if (!base || iter->src != base->src)
	base = iter;
      sec.loc (l: iter->offset - base->offset);
      if (base == iter)
	{
	  /* New run: stream the map's description.  */
	  sec.u (v: iter->src->sysp);
	  sec.u (v: iter->src->m_range_bits);
	  sec.u (v: iter->src->m_column_and_range_bits - iter->src->m_range_bits);

	  /* Stream the index into the filename table written above.  */
	  const char *fname = ORDINARY_MAP_FILE_NAME (ord_map: iter->src);
	  for (unsigned ix = 0; ix != filenames.length (); ix++)
	    if (filenames[ix] == fname)
	      {
		sec.u (v: ix);
		break;
	      }
	  /* Starting line, adjusted for where this span begins within
	     the map.  */
	  unsigned line = ORDINARY_MAP_STARTING_LINE_NUMBER (ord_map: iter->src);
	  line += iter->offset >> iter->src->m_column_and_range_bits;
	  sec.u (v: line);
	}
      sec.loc (l: iter->remap);
      if (base == iter)
	{
	  /* Write the included from location, which means reading it
	     while reading in the ordinary maps.  So we'd better not
	     be getting ahead of ourselves.  */
	  location_t from = linemap_included_from (ord_map: iter->src);
	  gcc_checking_assert (from < MAP_START_LOCATION (iter->src));
	  if (from != UNKNOWN_LOCATION && has_partitions)
	    {
	      /* A partition's span will have a from pointing at a
		 MODULE_INC.  Find that map's from.  */
	      line_map_ordinary const *fmap
		= linemap_check_ordinary (map: linemap_lookup (line_table, from));
	      if (MAP_MODULE_P (map: fmap))
		from = linemap_included_from (ord_map: fmap);
	    }
	  write_location (sec, loc: from);
	}
    }

  filenames.release ();

  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".olm" ), crc_ptr: crc_p);
  dump.outdent ();
}
| 19121 | |
| 19122 | /* Return the prefix to use for dumping a #pragma diagnostic change to DK. */ |
| 19123 | |
| 19124 | static const char * |
| 19125 | dk_string (enum diagnostics::kind dk) |
| 19126 | { |
| 19127 | gcc_assert (dk > diagnostics::kind::unspecified |
| 19128 | && dk < diagnostics::kind::last_diagnostic_kind); |
| 19129 | if (dk == diagnostics::kind::ignored) |
| 19130 | /* diagnostics/kinds.def has an empty string for ignored. */ |
| 19131 | return "ignored: " ; |
| 19132 | else |
| 19133 | return diagnostics::get_text_for_kind (dk); |
| 19134 | } |
| 19135 | |
| 19136 | /* Dump one #pragma GCC diagnostic entry. */ |
| 19137 | |
| 19138 | static bool |
| 19139 | dump_dc_change (unsigned index, unsigned opt, enum diagnostics::kind dk) |
| 19140 | { |
| 19141 | if (dk == diagnostics::kind::pop) |
| 19142 | return dump (" Index %u: pop from %d" , index, opt); |
| 19143 | else |
| 19144 | return dump (" Index %u: %s%s" , index, dk_string (dk), |
| 19145 | cl_options[opt].opt_text); |
| 19146 | } |
| 19147 | |
| 19148 | /* Write out any #pragma GCC diagnostic info to the .dgc section. */ |
| 19149 | |
void
module_state::write_diagnostic_classification (elf_out *to,
					       diagnostics::context *dc,
					       unsigned *crc_p)
{
  auto &changes = dc->get_classification_history ();

  bytes_out sec (to);
  /* When SEC is not streaming (presumably a location-noting pass --
     TODO confirm against caller) we skip the section bookkeeping and
     integer writes, but still call write_location below.  */
  if (sec.streaming_p ())
    {
      sec.begin ();
      dump () && dump ("Writing diagnostic change locations" );
      dump.indent ();
    }

  unsigned len = changes.length ();

  /* We don't want to write out any entries that came from one of our imports.
     But then we need to adjust the total, and change diagnostics::kind::pop
     targets to match the index in our actual output.  So remember how many
     lines we had skipped at each step, where -1 means this line itself
     is skipped.  */
  int skips = 0;
  auto_vec<int> skips_at (len);
  skips_at.safe_grow (len);

  for (unsigned i = 0; i < len; ++i)
    {
      const auto &c = changes[i];
      skips_at[i] = skips;
      if (linemap_location_from_module_p (line_table, c.location))
	{
	  ++skips;
	  skips_at[i] = -1;
	  continue;
	}
    }

  if (sec.streaming_p ())
    {
      /* Total entries actually emitted, after dropping imports'.  */
      sec.u (v: len - skips);
      dump () && dump ("Diagnostic changes: %u" , len - skips);
    }

  for (unsigned i = 0; i < len; ++i)
    {
      if (skips_at[i] == -1)
	continue;

      const auto &c = changes[i];
      write_location (sec, loc: c.location);
      if (sec.streaming_p ())
	{
	  unsigned opt = c.option;
	  /* A pop's 'option' is a history index; rebase it into the
	     skipped-entry-free output numbering.  */
	  if (c.kind == diagnostics::kind::pop)
	    opt -= skips_at[opt];
	  sec.u (v: opt);
	  sec.u (v: static_cast<unsigned> (c.kind));
	  dump () && dump_dc_change (index: i - skips_at[i], opt, dk: c.kind);
	}
    }

  if (sec.streaming_p ())
    {
      sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".dgc" ), crc_ptr: crc_p);
      dump.outdent ();
    }
}
| 19218 | |
| 19219 | /* Read any #pragma GCC diagnostic info from the .dgc section. */ |
| 19220 | |
bool
module_state::read_diagnostic_classification (diagnostics::context *dc)
{
  bytes_in sec;

  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".dgc" ))
    return false;

  dump () && dump ("Reading diagnostic change locations" );
  dump.indent ();

  unsigned len = sec.u ();
  dump () && dump ("Diagnostic changes: %u" , len);

  auto &changes = dc->get_classification_history ();
  /* The stream's pop targets are relative to the start of this
     module's entries; rebase them against our current history.  */
  int offset = changes.length ();
  changes.reserve (nelems: len + 1);	/* +1 for a possible final pop below.  */
  for (unsigned i = 0; i < len; ++i)
    {
      location_t loc = read_location (sec);
      int opt = sec.u ();
      enum diagnostics::kind kind = (enum diagnostics::kind) sec.u ();
      if (kind == diagnostics::kind::pop)
	/* For a pop, opt is the 'changes' index to return to.  */
	opt += offset;
      changes.quick_push (obj: { .location: loc, .option: opt, .kind: kind });
      dump () && dump_dc_change (index: changes.length () - 1, opt, dk: kind);
    }

  /* Did the import pop all its diagnostic changes?  Walk the chain of
     trailing pops; reaching OFFSET means everything was undone.  */
  bool last_was_reset = (len == 0);
  if (len)
    for (int i = changes.length () - 1; ; --i)
      {
	gcc_checking_assert (i >= offset);

	const auto &c = changes[i];
	if (c.kind != diagnostics::kind::pop)
	  break;
	else if (c.option == offset)
	  {
	    last_was_reset = true;
	    break;
	  }
	else
	  /* As in update_effective_level_from_pragmas, the loop will decrement
	     i so we actually jump to c.option - 1.  */
	  i = c.option;
      }
  if (!last_was_reset)
    {
      /* It didn't, so add a pop at its last location to avoid affecting later
	 imports.  */
      location_t last_loc = ordinary_locs.first + ordinary_locs.second - 1;
      changes.quick_push (obj: { .location: last_loc, .option: offset, .kind: diagnostics::kind::pop });
      dump () && dump (" Adding final pop from index %d" , offset);
    }

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;

  return true;
}
| 19285 | |
/* Write the macro location maps to the .mlm section.  INFO.second is
   the number of macro maps to emit.  Token locations are streamed as
   a run-length encoding of identical values.  */

void
module_state::write_macro_maps (elf_out *to, range_t &info, unsigned *crc_p)
{
  dump () && dump ("Writing macro location maps" );
  dump.indent ();

  bytes_out sec (to);
  sec.begin ();

  dump () && dump ("Macro maps:%K" , info.second);
  sec.loc (l: info.second);

  line_map_uint_t macro_num = 0;
  /* Iterate the remap array in reverse.  */
  for (auto iter = macro_loc_remap->end (), begin = macro_loc_remap->begin ();
       iter-- != begin;)
    {
      auto mac = iter->src;
      sec.loc (l: iter->remap);
      sec.u (v: mac->n_tokens);
      sec.cpp_node (node: mac->macro);
      write_location (sec, loc: mac->m_expansion);
      const location_t *locs = mac->macro_locations;
      /* There are lots of identical runs.  */
      location_t prev = UNKNOWN_LOCATION;
      unsigned count = 0;
      unsigned runs = 0;
      /* Walk the 2 * n_tokens location array from the top down,
	 emitting (count, location) pairs; COUNT closes the previous
	 run.  The reader (read_macro_maps) mirrors this decoding.  */
      for (unsigned jx = mac->n_tokens * 2; jx--;)
	{
	  location_t tok_loc = locs[jx];
	  if (tok_loc == prev)
	    {
	      count++;
	      continue;
	    }
	  runs++;
	  sec.u (v: count);
	  count = 1;
	  prev = tok_loc;
	  write_location (sec, loc: tok_loc);
	}
      sec.u (v: count);	/* Close the final run.  */
      dump (dumper::LOCATION)
	&& dump ("Macro:%K %I %u/%u*2 locations [%K,%K)->%K" ,
		 macro_num, identifier (node: mac->macro),
		 runs, mac->n_tokens,
		 MAP_START_LOCATION (map: mac),
		 MAP_START_LOCATION (map: mac) + mac->n_tokens,
		 iter->remap);
      macro_num++;
    }
  gcc_assert (macro_num == info.second);

  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".mlm" ), crc_ptr: crc_p);
  dump.outdent ();
}
| 19341 | |
bool
module_state::read_ordinary_maps (line_map_uint_t num_ord_locs,
				  unsigned range_bits)
{
  bytes_in sec;

  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".olm" ))
    return false;
  dump () && dump ("Reading ordinary location maps" );
  dump.indent ();

  /* Read the filename table.  */
  unsigned len = sec.u ();
  dump () && dump ("%u source file names" , len);
  vec<const char *> filenames;
  filenames.create (nelems: len);
  for (unsigned ix = 0; ix != len; ix++)
    {
      size_t l;
      const char *buf = sec.str (len_p: &l);
      char *fname = XNEWVEC (char, l + 1);
      memcpy (dest: fname, src: buf, n: l + 1);
      dump (dumper::LOCATION) && dump ("Source file[%u]=%s" , ix, fname);
      /* We leak these names into the line-map table.  But it
	 doesn't own them.  */
      filenames.quick_push (obj: fname);
    }

  line_map_uint_t num_ordinary = sec.loc ();
  dump () && dump ("Ordinary maps:%K, range_bits:%u" ,
		   num_ordinary, range_bits);

  /* Align the start of our location range to RANGE_BITS (the maximum
     range alignment the writer recorded).  */
  location_t offset = line_table->highest_location + 1;
  offset += ((loc_one << range_bits) - 1);
  offset &= ~((loc_one << range_bits) - 1);
  ordinary_locs.first = offset;

  bool propagated = spans.maybe_propagate (import: this, hwm: offset);
  line_map_ordinary *maps = static_cast<line_map_ordinary *>
    (line_map_new_raw (line_table, false, num_ordinary));

  /* BASE is the most recently fully-read map; entries with a non-zero
     delta are copies of it, shifted by line offset (mirrors the
     encoding in write_ordinary_maps).  */
  const line_map_ordinary *base = nullptr;
  for (line_map_uint_t ix = 0; ix != num_ordinary && !sec.get_overrun (); ix++)
    {
      line_map_ordinary *map = &maps[ix];

      location_t offset = sec.loc ();
      if (!offset)
	{
	  /* Zero delta: a new map, read its full description.  */
	  map->reason = LC_RENAME;
	  map->sysp = sec.u ();
	  map->m_range_bits = sec.u ();
	  map->m_column_and_range_bits = sec.u () + map->m_range_bits;
	  unsigned fnum = sec.u ();
	  map->to_file = (fnum < filenames.length () ? filenames[fnum] : "" );
	  map->to_line = sec.u ();
	  base = map;
	}
      else
	{
	  *map = *base;
	  map->to_line += offset >> map->m_column_and_range_bits;
	}
      location_t remap = sec.loc ();
      map->start_location = remap + ordinary_locs.first;
      if (base == map)
	{
	  /* Root the outermost map at our location.  */
	  ordinary_locs.second = remap;
	  location_t from = read_location (sec);
	  map->included_from = from != UNKNOWN_LOCATION ? from : loc;
	}
    }

  ordinary_locs.second = num_ord_locs;
  /* highest_location is the one handed out, not the next one to
     hand out.  */
  line_table->highest_location = ordinary_locs.first + ordinary_locs.second - 1;

  if (line_table->highest_location >= LINE_MAP_MAX_LOCATION_WITH_COLS)
    /* We shouldn't run out of locations, as we checked before
       starting.  */
    sec.set_overrun ();
  dump () && dump ("Ordinary location [%K,+%K)" ,
		   ordinary_locs.first, ordinary_locs.second);

  if (propagated)
    spans.close ();

  filenames.release ();

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;

  return true;
}
| 19439 | |
bool
module_state::read_macro_maps (line_map_uint_t num_macro_locs)
{
  bytes_in sec;

  if (!sec.begin (loc, source: from (), MOD_SNAME_PFX ".mlm" ))
    return false;
  dump () && dump ("Reading macro location maps" );
  dump.indent ();

  line_map_uint_t num_macros = sec.loc ();
  dump () && dump ("Macro maps:%K locs:%K" ,
		   num_macros, num_macro_locs);

  bool propagated = spans.maybe_propagate (import: this,
					   hwm: line_table->highest_location + 1);

  /* Macro locations are allocated downwards from the lowest one
     currently in use; reserve our range below it.  */
  location_t offset = LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table);
  macro_locs.second = num_macro_locs;
  macro_locs.first = offset - num_macro_locs;

  dump () && dump ("Macro loc delta %K" , offset);
  dump () && dump ("Macro locations [%K,%K)" ,
		   macro_locs.first, macro_locs.second);

  for (line_map_uint_t ix = 0; ix != num_macros && !sec.get_overrun (); ix++)
    {
      location_t offset = sec.loc ();
      unsigned n_tokens = sec.u ();
      cpp_hashnode *node = sec.cpp_node ();
      location_t exp_loc = read_location (sec);

      const line_map_macro *macro
	= linemap_enter_macro (line_table, node, exp_loc, n_tokens);
      if (!macro)
	/* We shouldn't run out of locations, as we checked that we
	   had enough before starting.  */
	break;
      gcc_checking_assert (MAP_START_LOCATION (macro)
			   == offset + macro_locs.first);

      /* Decode the run-length-encoded token locations, highest index
	 first (mirrors the encoding in write_macro_maps).  */
      location_t *locs = macro->macro_locations;
      location_t tok_loc = UNKNOWN_LOCATION;
      unsigned count = sec.u ();
      unsigned runs = 0;
      for (unsigned jx = macro->n_tokens * 2; jx-- && !sec.get_overrun ();)
	{
	  while (!count-- && !sec.get_overrun ())
	    {
	      runs++;
	      tok_loc = read_location (sec);
	      count = sec.u ();
	    }
	  locs[jx] = tok_loc;
	}
      /* Any leftover run count means the stream was malformed.  */
      if (count)
	sec.set_overrun ();
      dump (dumper::LOCATION)
	&& dump ("Macro:%K %I %u/%u*2 locations [%K,%K)" ,
		 ix, identifier (node), runs, n_tokens,
		 MAP_START_LOCATION (map: macro),
		 MAP_START_LOCATION (map: macro) + n_tokens);
    }

  dump () && dump ("Macro location lwm:%K" , macro_locs.first);
  if (propagated)
    spans.close ();

  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;

  return true;
}
| 19514 | |
| 19515 | /* Serialize the definition of MACRO. */ |
| 19516 | |
void
module_state::write_define (bytes_out &sec, const cpp_macro *macro)
{
  sec.u (v: macro->count);

  sec.stream_bits ();
  bytes_out::bits_out bits = sec.stream_bits ();
  bits.b (x: macro->fun_like);
  bits.b (x: macro->variadic);
  bits.b (x: macro->syshdr);
  bits.bflush ();

  write_location (sec, loc: macro->line);
  if (macro->fun_like)
    {
      /* Parameter count and the parameter identifiers.  */
      sec.u (v: macro->paramc);
      const cpp_hashnode *const *parms = macro->parm.params;
      for (unsigned ix = 0; ix != macro->paramc; ix++)
	sec.cpp_node (node: parms[ix]);
    }

  /* LEN accumulates the size of all string-valued tokens, which are
     emitted as a single trailing blob after the token list.  */
  unsigned len = 0;
  for (unsigned ix = 0; ix != macro->count; ix++)
    {
      const cpp_token *token = &macro->exp.tokens[ix];
      write_location (sec, loc: token->src_loc);
      sec.u (v: token->type);
      sec.u (v: token->flags);
      switch (cpp_token_val_index (tok: token))
	{
	default:
	  gcc_unreachable ();

	case CPP_TOKEN_FLD_ARG_NO:
	  /* An argument reference.  */
	  sec.u (v: token->val.macro_arg.arg_no);
	  sec.cpp_node (node: token->val.macro_arg.spelling);
	  break;

	case CPP_TOKEN_FLD_NODE:
	  /* An identifier.  */
	  sec.cpp_node (node: token->val.node.node);
	  if (token->val.node.spelling == token->val.node.node)
	    /* The spelling will usually be the same, so optimize
	       that.  */
	    sec.str (NULL, len: 0);
	  else
	    sec.cpp_node (node: token->val.node.spelling);
	  break;

	case CPP_TOKEN_FLD_NONE:
	  break;

	case CPP_TOKEN_FLD_STR:
	  /* A string, number or comment.  Not always NUL terminated,
	     we stream out in a single concatenation with embedded
	     NULs as that's a safe default.  */
	  len += token->val.str.len + 1;
	  sec.u (v: token->val.str.len);
	  break;

	case CPP_TOKEN_FLD_SOURCE:
	case CPP_TOKEN_FLD_TOKEN_NO:
	case CPP_TOKEN_FLD_PRAGMA:
	  /* These do not occur inside a macro itself.  */
	  gcc_unreachable ();
	}
    }

  if (len)
    {
      /* Second pass: emit the concatenated text of all string-valued
	 tokens, each component followed by a NUL.  */
      char *ptr = reinterpret_cast<char *> (sec.buf (len));
      len = 0;
      for (unsigned ix = 0; ix != macro->count; ix++)
	{
	  const cpp_token *token = &macro->exp.tokens[ix];
	  if (cpp_token_val_index (tok: token) == CPP_TOKEN_FLD_STR)
	    {
	      memcpy (dest: ptr + len, src: token->val.str.text,
		      n: token->val.str.len);
	      len += token->val.str.len;
	      ptr[len++] = 0;
	    }
	}
    }
}
| 19602 | |
| 19603 | /* Read a macro definition. */ |
| 19604 | |
cpp_macro *
module_state::read_define (bytes_in &sec, cpp_reader *reader) const
{
  unsigned count = sec.u ();
  /* We rely on knowing cpp_reader's hash table is ident_hash, and
     its subobject allocator is stringpool_ggc_alloc and that is just
     a wrapper for ggc_alloc_atomic.  */
  /* cpp_macro embeds one token, so allocate COUNT - 1 extra -- unless
     COUNT is zero (hence the !!count).  */
  cpp_macro *macro
    = (cpp_macro *)ggc_alloc_atomic (s: sizeof (cpp_macro)
				     + sizeof (cpp_token) * (count - !!count));
  memset (s: macro, c: 0, n: sizeof (cpp_macro) + sizeof (cpp_token) * (count - !!count));

  macro->count = count;
  macro->kind = cmk_macro;
  macro->imported_p = true;

  bytes_in::bits_in bits = sec.stream_bits ();
  macro->fun_like = bits.b ();
  macro->variadic = bits.b ();
  macro->syshdr = bits.b ();
  bits.bflush ();

  macro->line = read_location (sec);

  if (macro->fun_like)
    {
      unsigned paramc = sec.u ();
      cpp_hashnode **params
	= (cpp_hashnode **)ggc_alloc_atomic (s: sizeof (cpp_hashnode *) * paramc);
      macro->paramc = paramc;
      macro->parm.params = params;
      for (unsigned ix = 0; ix != paramc; ix++)
	params[ix] = sec.cpp_node ();
    }

  /* LEN accumulates the expected size of the trailing blob of
     concatenated string-token text (see write_define).  */
  unsigned len = 0;
  for (unsigned ix = 0; ix != count && !sec.get_overrun (); ix++)
    {
      cpp_token *token = &macro->exp.tokens[ix];
      token->src_loc = read_location (sec);
      token->type = cpp_ttype (sec.u ());
      token->flags = sec.u ();
      switch (cpp_token_val_index (tok: token))
	{
	default:
	  sec.set_overrun ();
	  break;

	case CPP_TOKEN_FLD_ARG_NO:
	  /* An argument reference.  */
	  {
	    unsigned arg_no = sec.u ();
	    /* Argument numbers are 1-based; reject out-of-range.  */
	    if (arg_no - 1 >= macro->paramc)
	      sec.set_overrun ();
	    token->val.macro_arg.arg_no = arg_no;
	    token->val.macro_arg.spelling = sec.cpp_node ();
	  }
	  break;

	case CPP_TOKEN_FLD_NODE:
	  /* An identifier.  */
	  token->val.node.node = sec.cpp_node ();
	  token->val.node.spelling = sec.cpp_node ();
	  if (!token->val.node.spelling)
	    /* An identical spelling was streamed as an empty string.  */
	    token->val.node.spelling = token->val.node.node;
	  break;

	case CPP_TOKEN_FLD_NONE:
	  break;

	case CPP_TOKEN_FLD_STR:
	  /* A string, number or comment.  */
	  token->val.str.len = sec.u ();
	  len += token->val.str.len + 1;
	  break;
	}
    }

  if (len)
    if (const char *ptr = reinterpret_cast<const char *> (sec.buf (len)))
      {
	/* There should be a final NUL.  */
	if (ptr[len-1])
	  sec.set_overrun ();
	/* cpp_alloc_token_string will add a final NUL.  */
	const unsigned char *buf
	  = cpp_alloc_token_string (reader, (const unsigned char *)ptr, len - 1);
	len = 0;
	for (unsigned ix = 0; ix != count && !sec.get_overrun (); ix++)
	  {
	    cpp_token *token = &macro->exp.tokens[ix];
	    if (cpp_token_val_index (tok: token) == CPP_TOKEN_FLD_STR)
	      {
		token->val.str.text = buf + len;
		len += token->val.str.len;
		/* Each component must be NUL terminated within the
		   blob.  */
		if (buf[len++])
		  sec.set_overrun ();
	      }
	  }
      }

  if (sec.get_overrun ())
    return NULL;
  return macro;
}
| 19710 | |
| 19711 | /* Exported macro data. */ |
struct GTY(()) macro_export {
  /* The exported definition, or NULL if there is none (e.g. only an
     undef was recorded -- presumed; verify against users).  */
  cpp_macro *def;
  /* Location of the #undef, or UNKNOWN_LOCATION if none.  */
  location_t undef_loc;

  macro_export ()
    :def (NULL), undef_loc (UNKNOWN_LOCATION)
  {
  }
};
| 19721 | |
| 19722 | /* Imported macro data. */ |
class macro_import {
public:
  /* One import record: which module, whether it defines and/or
     undefines the macro, and an offset (used to index macro_exports
     for module zero's slot -- see that array's comment).  */
  struct slot {
#if defined (WORDS_BIGENDIAN) && SIZEOF_VOID_P == 8
    /* On 64-bit big-endian, OFFSET comes first so that BITS overlaps
       the low-order end of the union'd pointer below -- presumed from
       the discriminator comment; confirm layout.  */
    int offset;
#endif
    /* We need to ensure we don't use the LSB for representation, as
       that's the union discriminator below.  */
    unsigned bits;

#if !(defined (WORDS_BIGENDIAN) && SIZEOF_VOID_P == 8)
    int offset;
#endif

  public:
    /* Layout of BITS: low two bits are the defness (define and/or
       undef), remaining bits are the module number.  */
    enum Layout {
      L_DEF = 1,
      L_UNDEF = 2,
      L_BOTH = 3,
      L_MODULE_SHIFT = 2
    };

  public:
    /* Not a regular ctor, because we put it in a union, and that's
       not allowed in C++ 98.  */
    static slot ctor (unsigned module, unsigned defness)
    {
      gcc_checking_assert (defness);
      slot s;
      s.bits = defness | (module << L_MODULE_SHIFT);
      s.offset = -1;
      return s;
    }

  public:
    /* Extract the L_DEF/L_UNDEF/L_BOTH bits.  */
    unsigned get_defness () const
    {
      return bits & L_BOTH;
    }
    /* Extract the importing module's number.  */
    unsigned get_module () const
    {
      return bits >> L_MODULE_SHIFT;
    }
    /* Drop the definition, leaving (at least) the undef bit set.  */
    void become_undef ()
    {
      bits &= ~unsigned (L_DEF);
      bits |= unsigned (L_UNDEF);
    }
  };

private:
  typedef vec<slot, va_heap, vl_embed> ary_t;
  union either {
    /* Discriminated by bits 0|1 != 0.  The expected case is that
       there will be exactly one slot per macro, hence the effort of
       packing that.  */
    ary_t *ary;
    slot single;
  } u;

public:
  macro_import ()
  {
    u.ary = NULL;
  }

private:
  /* True when U holds an inline slot (its defness bits are set)
     rather than a vector pointer.  */
  bool single_p () const
  {
    return u.single.bits & slot::L_BOTH;
  }
  /* True when any slot (inline or vector) has been stored.  */
  bool occupied_p () const
  {
    return u.ary != NULL;
  }

public:
  unsigned length () const
  {
    gcc_checking_assert (occupied_p ());
    return single_p () ? 1 : u.ary->length ();
  }
  slot &operator[] (unsigned ix)
  {
    gcc_checking_assert (occupied_p ());
    if (single_p ())
      {
	/* Only index zero is valid in the inline case.  */
	gcc_checking_assert (!ix);
	return u.single;
      }
    else
      return (*u.ary)[ix];
  }

public:
  slot &exported ();
  slot &append (unsigned module, unsigned defness);
};
| 19821 | |
| 19822 | /* O is a new import to append to the list for. If we're an empty |
| 19823 | set, initialize us. */ |
| 19824 | |
| 19825 | macro_import::slot & |
| 19826 | macro_import::append (unsigned module, unsigned defness) |
| 19827 | { |
| 19828 | if (!occupied_p ()) |
| 19829 | { |
| 19830 | u.single = slot::ctor (module, defness); |
| 19831 | return u.single; |
| 19832 | } |
| 19833 | else |
| 19834 | { |
| 19835 | bool single = single_p (); |
| 19836 | ary_t *m = single ? NULL : u.ary; |
| 19837 | vec_safe_reserve (v&: m, nelems: 1 + single); |
| 19838 | if (single) |
| 19839 | m->quick_push (obj: u.single); |
| 19840 | u.ary = m; |
| 19841 | return *u.ary->quick_push (obj: slot::ctor (module, defness)); |
| 19842 | } |
| 19843 | } |
| 19844 | |
| 19845 | /* We're going to export something. Make sure the first import slot |
| 19846 | is us. */ |
| 19847 | |
macro_import::slot &
macro_import::exported ()
{
  /* If the zeroth slot already belongs to module zero (us), just set
     its definition bit and return it.  */
  if (occupied_p () && !(*this)[0].get_module ())
    {
      slot &res = (*this)[0];
      res.bits |= slot::L_DEF;
      return res;
    }

  /* Otherwise append a module-zero definition slot, then swap it
     into the zeroth position -- readers expect the export first.  */
  slot *a = &append (module: 0, defness: slot::L_DEF);
  if (!single_p ())
    {
      slot &f = (*this)[0];
      std::swap (a&: f, b&: *a);
      a = &f;
    }
  return *a;
}
| 19867 | |
/* The import (&exported) macros.  cpp_hashnode's deferred field
   indexes this array (offset by 1, so zero means 'not present').  */
| 19870 | |
| 19871 | static vec<macro_import, va_heap, vl_embed> *macro_imports; |
| 19872 | |
| 19873 | /* The exported macros. A macro_import slot's zeroth element's offset |
| 19874 | indexes this array. If the zeroth slot is not for module zero, |
| 19875 | there is no export. */ |
| 19876 | |
| 19877 | static GTY(()) vec<macro_export, va_gc> *macro_exports; |
| 19878 | |
| 19879 | /* The reachable set of header imports from this TU. */ |
| 19880 | |
| 19881 | static GTY(()) bitmap ; |
| 19882 | |
| 19883 | /* Get the (possibly empty) macro imports for NODE. */ |
| 19884 | |
| 19885 | static macro_import & |
| 19886 | get_macro_imports (cpp_hashnode *node) |
| 19887 | { |
| 19888 | if (node->deferred) |
| 19889 | return (*macro_imports)[node->deferred - 1]; |
| 19890 | |
| 19891 | vec_safe_reserve (v&: macro_imports, nelems: 1); |
| 19892 | node->deferred = macro_imports->length () + 1; |
| 19893 | return *vec_safe_push (v&: macro_imports, obj: macro_import ()); |
| 19894 | } |
| 19895 | |
/* Get the macro export record for SLOT, creating one if absent.  */
| 19897 | |
| 19898 | static macro_export & |
| 19899 | get_macro_export (macro_import::slot &slot) |
| 19900 | { |
| 19901 | if (slot.offset >= 0) |
| 19902 | return (*macro_exports)[slot.offset]; |
| 19903 | |
| 19904 | vec_safe_reserve (v&: macro_exports, nelems: 1); |
| 19905 | slot.offset = macro_exports->length (); |
| 19906 | return *macro_exports->quick_push (obj: macro_export ()); |
| 19907 | } |
| 19908 | |
| 19909 | /* If NODE is an exportable macro, add it to the export set. */ |
| 19910 | |
static int
maybe_add_macro (cpp_reader *, cpp_hashnode *node, void *data_)
{
  bool exporting = false;

  if (cpp_user_macro_p (node))
    if (cpp_macro *macro = node->value.macro)
      /* Ignore imported, builtins, command line and forced header macros.  */
      if (!macro->imported_p
	  && !macro->lazy && macro->line >= spans.main_start ())
	{
	  gcc_checking_assert (macro->kind == cmk_macro);
	  /* I don't want to deal with this corner case, that I suspect is
	     a devil's advocate reading of the standard.  */
	  gcc_checking_assert (!macro->extra_tokens);

	  /* Record the macro's current definition as our export.  */
	  macro_import::slot &slot = get_macro_imports (node).exported ();
	  macro_export &exp = get_macro_export (slot);
	  exp.def = macro;
	  exporting = true;
	}

  if (!exporting && node->deferred)
    {
      /* Not currently defined, but if the zeroth import slot is
	 module zero we recorded an export earlier (e.g. an #undef),
	 and it must still be written.  */
      macro_import &imports = (*macro_imports)[node->deferred - 1];
      macro_import::slot &slot = imports[0];
      if (!slot.get_module ())
	{
	  gcc_checking_assert (slot.get_defness ());
	  exporting = true;
	}
    }

  if (exporting)
    static_cast<vec<cpp_hashnode *> *> (data_)->safe_push (obj: node);

  return 1; /* Don't stop.  */
}
| 19949 | |
| 19950 | /* Order cpp_hashnodes A_ and B_ by their exported macro locations. */ |
| 19951 | |
| 19952 | static int |
| 19953 | macro_loc_cmp (const void *a_, const void *b_) |
| 19954 | { |
| 19955 | const cpp_hashnode *node_a = *(const cpp_hashnode *const *)a_; |
| 19956 | macro_import &import_a = (*macro_imports)[node_a->deferred - 1]; |
| 19957 | const macro_export &export_a = (*macro_exports)[import_a[0].offset]; |
| 19958 | location_t loc_a = export_a.def ? export_a.def->line : export_a.undef_loc; |
| 19959 | |
| 19960 | const cpp_hashnode *node_b = *(const cpp_hashnode *const *)b_; |
| 19961 | macro_import &import_b = (*macro_imports)[node_b->deferred - 1]; |
| 19962 | const macro_export &export_b = (*macro_exports)[import_b[0].offset]; |
| 19963 | location_t loc_b = export_b.def ? export_b.def->line : export_b.undef_loc; |
| 19964 | |
| 19965 | if (loc_a < loc_b) |
| 19966 | return +1; |
| 19967 | else if (loc_a > loc_b) |
| 19968 | return -1; |
| 19969 | else |
| 19970 | return 0; |
| 19971 | } |
| 19972 | |
| 19973 | /* Gather the macro definitions and undefinitions that we will need to |
| 19974 | write out. */ |
| 19975 | |
| 19976 | vec<cpp_hashnode *> * |
| 19977 | module_state::prepare_macros (cpp_reader *reader) |
| 19978 | { |
| 19979 | vec<cpp_hashnode *> *macros; |
| 19980 | vec_alloc (v&: macros, nelems: 100); |
| 19981 | |
| 19982 | cpp_forall_identifiers (reader, maybe_add_macro, macros); |
| 19983 | |
| 19984 | dump (dumper::MACRO) && dump ("No more than %u macros" , macros->length ()); |
| 19985 | |
| 19986 | macros->qsort (macro_loc_cmp); |
| 19987 | |
| 19988 | // Note the locations. |
| 19989 | for (unsigned ix = macros->length (); ix--;) |
| 19990 | { |
| 19991 | cpp_hashnode *node = (*macros)[ix]; |
| 19992 | macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0]; |
| 19993 | macro_export &mac = (*macro_exports)[slot.offset]; |
| 19994 | |
| 19995 | if (IDENTIFIER_KEYWORD_P (identifier (node))) |
| 19996 | continue; |
| 19997 | |
| 19998 | if (mac.undef_loc != UNKNOWN_LOCATION) |
| 19999 | note_location (loc: mac.undef_loc); |
| 20000 | if (mac.def) |
| 20001 | { |
| 20002 | note_location (loc: mac.def->line); |
| 20003 | for (unsigned ix = 0; ix != mac.def->count; ix++) |
| 20004 | note_location (loc: mac.def->exp.tokens[ix].src_loc); |
| 20005 | } |
| 20006 | } |
| 20007 | |
| 20008 | return macros; |
| 20009 | } |
| 20010 | |
| 20011 | /* Write out the exported defines. This is two sections, one |
| 20012 | containing the definitions, the other a table of node names. */ |
| 20013 | |
unsigned
module_state::write_macros (elf_out *to, vec<cpp_hashnode *> *macros,
			    unsigned *crc_p)
{
  dump () && dump ("Writing macros");
  dump.indent ();

  /* Write the defs */
  bytes_out sec (to);
  sec.begin ();

  unsigned count = 0;
  for (unsigned ix = macros->length (); ix--;)
    {
      cpp_hashnode *node = (*macros)[ix];
      macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0];
      gcc_assert (!slot.get_module () && slot.get_defness ());

      macro_export &mac = (*macro_exports)[slot.offset];
      /* The slot's defness bits must agree with the recorded export:
	 L_UNDEF iff there's an undef location, L_DEF iff there's a
	 definition.  */
      gcc_assert (!!(slot.get_defness () & macro_import::slot::L_UNDEF)
		  == (mac.undef_loc != UNKNOWN_LOCATION)
		  && !!(slot.get_defness () & macro_import::slot::L_DEF)
		  == (mac.def != NULL));

      if (IDENTIFIER_KEYWORD_P (identifier (node)))
	{
	  warning_at (mac.def->line, 0,
		      "not exporting %<#define %E%> as it is a keyword",
		      identifier (node));
	  /* Zero the offset so the table loop below skips this one.  */
	  slot.offset = 0;
	  continue;
	}

      count++;
      /* Remember where this macro's def was streamed; the name table
	 written below records this offset.  */
      slot.offset = sec.pos;
      dump (dumper::MACRO)
	&& dump ("Writing macro %s%s%s %I at %u",
		 slot.get_defness () & macro_import::slot::L_UNDEF
		 ? "#undef" : "",
		 slot.get_defness () == macro_import::slot::L_BOTH
		 ? " & " : "",
		 slot.get_defness () & macro_import::slot::L_DEF
		 ? "#define" : "",
		 identifier (node), slot.offset);
      if (mac.undef_loc != UNKNOWN_LOCATION)
	write_location (sec, loc: mac.undef_loc);
      if (mac.def)
	write_define (sec, macro: mac.def);
    }
  if (count)
    // We may have ended on a tokenless macro with a very short
    // location, that will cause problems reading its bit flags.
    sec.u (v: 0);
  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".def"), crc_ptr: crc_p);

  if (count)
    {
      /* Write the table.  */
      bytes_out sec (to);
      sec.begin ();
      sec.u (v: count);

      for (unsigned ix = macros->length (); ix--;)
	{
	  const cpp_hashnode *node = (*macros)[ix];
	  macro_import::slot &slot = (*macro_imports)[node->deferred - 1][0];

	  /* Keywords had their offset zeroed above; skip them.  */
	  if (slot.offset)
	    {
	      sec.cpp_node (node);
	      sec.u (v: slot.get_defness ());
	      sec.u (v: slot.offset);
	    }
	}
      sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".mac"), crc_ptr: crc_p);
    }

  dump.outdent ();
  return count;
}
| 20094 | |
| 20095 | bool |
| 20096 | module_state::read_macros () |
| 20097 | { |
| 20098 | /* Get the def section. */ |
| 20099 | if (!slurp->macro_defs.begin (loc, source: from (), MOD_SNAME_PFX ".def" )) |
| 20100 | return false; |
| 20101 | |
| 20102 | /* Get the tbl section, if there are defs. */ |
| 20103 | if (slurp->macro_defs.more_p () |
| 20104 | && !slurp->macro_tbl.begin (loc, source: from (), MOD_SNAME_PFX ".mac" )) |
| 20105 | return false; |
| 20106 | |
| 20107 | return true; |
| 20108 | } |
| 20109 | |
| 20110 | /* Install the macro name table. */ |
| 20111 | |
void
module_state::install_macros ()
{
  bytes_in &sec = slurp->macro_tbl;
  if (!sec.size)
    return;

  dump () && dump ("Reading macro table %M", this);
  dump.indent ();

  unsigned count = sec.u ();
  dump () && dump ("%u macros", count);
  while (count--)
    {
      cpp_hashnode *node = sec.cpp_node ();
      macro_import &imp = get_macro_imports (node);
      /* Only the definedness bits are meaningful; zero means the
	 stream is corrupt.  */
      unsigned flags = sec.u () & macro_import::slot::L_BOTH;
      if (!flags)
	sec.set_overrun ();

      if (sec.get_overrun ())
	break;

      /* Record this module's contribution and where its definition
	 lives in the .def section.  */
      macro_import::slot &slot = imp.append (module: mod, defness: flags);
      slot.offset = sec.u ();

      dump (dumper::MACRO)
	&& dump ("Read %s macro %s%s%s %I at %u",
		 imp.length () > 1 ? "add" : "new",
		 flags & macro_import::slot::L_UNDEF ? "#undef" : "",
		 flags == macro_import::slot::L_BOTH ? " & " : "",
		 flags & macro_import::slot::L_DEF ? "#define" : "",
		 identifier (node), slot.offset);

      /* We'll leak an imported definition's TOKEN_FLD_STR's data
	 here.  But that only happens when we've had to resolve the
	 deferred macro before this import -- why are you doing
	 that?  */
      if (cpp_macro *cur = cpp_set_deferred_macro (node))
	if (!cur->imported_p)
	  {
	    /* Preserve the TU's own non-imported definition as an
	       export before the macro becomes deferred.  */
	    macro_import::slot &slot = imp.exported ();
	    macro_export &exp = get_macro_export (slot);
	    exp.def = cur;
	    dump (dumper::MACRO)
	      && dump ("Saving current #define %I", identifier (node));
	  }
    }

  /* We're now done with the table.  */
  elf_in::release (self: slurp->from, bytes&: sec);

  dump.outdent ();
}
| 20166 | |
| 20167 | /* Import the transitive macros. */ |
| 20168 | |
void
module_state::import_macros ()
{
  /* This import's reachable headers become reachable from the TU.  */
  bitmap_ior_into (headers, slurp->headers);

  /* Install the macro tables of every header in the set.  */
  bitmap_iterator bititer;
  unsigned bitnum;
  EXECUTE_IF_SET_IN_BITMAP (slurp->headers, 0, bitnum, bititer)
    (*modules)[bitnum]->install_macros ();
}
| 20179 | |
| 20180 | /* NODE is being undefined at LOC. Record it in the export table, if |
| 20181 | necessary. */ |
| 20182 | |
void
module_state::undef_macro (cpp_reader *, location_t loc, cpp_hashnode *node)
{
  if (!node->deferred)
    /* The macro is not imported, so our undef is irrelevant.  */
    return;

  unsigned n = dump.push (NULL);

  /* Record the undef in our export slot, discarding any previously
     recorded definition.  */
  macro_import::slot &slot = (*macro_imports)[node->deferred - 1].exported ();
  macro_export &exp = get_macro_export (slot);

  exp.undef_loc = loc;
  slot.become_undef ();
  exp.def = NULL;

  dump (dumper::MACRO) && dump ("Recording macro #undef %I", identifier (node));

  dump.pop (n);
}
| 20203 | |
| 20204 | /* NODE is a deferred macro node. Determine the definition and return |
| 20205 | it, with NULL if undefined. May issue diagnostics. |
| 20206 | |
| 20207 | This can leak memory, when merging declarations -- the string |
| 20208 | contents (TOKEN_FLD_STR) of each definition are allocated in |
| 20209 | unreclaimable cpp objstack. Only one will win. However, I do not |
| 20210 | expect this to be common -- mostly macros have a single point of |
| 20211 | definition. Perhaps we could restore the objstack to its position |
| 20212 | after the first imported definition (if that wins)? The macros |
| 20213 | themselves are GC'd. */ |
| 20214 | |
cpp_macro *
module_state::deferred_macro (cpp_reader *reader, location_t loc,
			      cpp_hashnode *node)
{
  macro_import &imports = (*macro_imports)[node->deferred - 1];

  unsigned n = dump.push (NULL);
  dump (dumper::MACRO) && dump ("Deferred macro %I", identifier (node));

  bitmap visible (BITMAP_GGC_ALLOC ());

  /* If the TU's own zeroth slot records an #undef, no import's
     definition can be visible; leave VISIBLE empty but for bit 0.  */
  if (!((imports[0].get_defness () & macro_import::slot::L_UNDEF)
	&& !imports[0].get_module ()))
    {
      /* Calculate the set of visible header imports.  */
      bitmap_copy (visible, headers);
      for (unsigned ix = imports.length (); ix--;)
	{
	  const macro_import::slot &slot = imports[ix];
	  unsigned mod = slot.get_module ();
	  /* An #undef in MOD hides everything MOD itself imports,
	     but MOD stays visible.  */
	  if ((slot.get_defness () & macro_import::slot::L_UNDEF)
	      && bitmap_bit_p (visible, mod))
	    {
	      bitmap arg = mod ? (*modules)[mod]->slurp->headers : headers;
	      bitmap_and_compl_into (visible, arg);
	      bitmap_set_bit (visible, mod);
	    }
	}
    }
  /* The TU itself is always visible.  */
  bitmap_set_bit (visible, 0);

  /* Now find the macros that are still visible.  */
  bool failed = false;
  cpp_macro *def = NULL;
  vec<macro_export> defs;
  defs.create (nelems: imports.length ());
  for (unsigned ix = imports.length (); ix--;)
    {
      const macro_import::slot &slot = imports[ix];
      unsigned mod = slot.get_module ();
      if (bitmap_bit_p (visible, mod))
	{
	  macro_export *pushed = NULL;
	  if (mod)
	    {
	      /* Lazily stream this import's definition from its .def
		 section.  */
	      const module_state *imp = (*modules)[mod];
	      bytes_in &sec = imp->slurp->macro_defs;
	      if (!sec.get_overrun ())
		{
		  dump (dumper::MACRO)
		    && dump ("Reading macro %s%s%s %I module %M at %u",
			     slot.get_defness () & macro_import::slot::L_UNDEF
			     ? "#undef" : "",
			     slot.get_defness () == macro_import::slot::L_BOTH
			     ? " & " : "",
			     slot.get_defness () & macro_import::slot::L_DEF
			     ? "#define" : "",
			     identifier (node), imp, slot.offset);
		  sec.random_access (offset: slot.offset);

		  macro_export exp;
		  if (slot.get_defness () & macro_import::slot::L_UNDEF)
		    exp.undef_loc = imp->read_location (sec);
		  if (slot.get_defness () & macro_import::slot::L_DEF)
		    exp.def = imp->read_define (sec, reader);
		  if (sec.get_overrun ())
		    error_at (loc, "macro definitions of %qE corrupted",
			      imp->name);
		  else
		    pushed = defs.quick_push (obj: exp);
		}
	    }
	  else
	    /* Module zero's export is already in memory.  */
	    pushed = defs.quick_push (obj: (*macro_exports)[slot.offset]);
	  /* All visible definitions must compare equal.  */
	  if (pushed && pushed->def)
	    {
	      if (!def)
		def = pushed->def;
	      else if (cpp_compare_macros (macro1: def, macro2: pushed->def))
		failed = true;
	    }
	}
    }

  if (failed)
    {
      /* If LOC is the first loc, this is the end of file check, which
	 is a warning.  */
      auto_diagnostic_group d;
      if (loc == MAP_START_LOCATION (map: LINEMAPS_ORDINARY_MAP_AT (set: line_table, index: 0)))
	warning_at (loc, OPT_Winvalid_imported_macros,
		    "inconsistent imported macro definition %qE",
		    identifier (node));
      else
	error_at (loc, "inconsistent imported macro definition %qE",
		  identifier (node));
      /* Show every contributing #undef and #define.  */
      for (unsigned ix = defs.length (); ix--;)
	{
	  macro_export &exp = defs[ix];
	  if (exp.undef_loc)
	    inform (exp.undef_loc, "%<#undef %E%>", identifier (node));
	  if (exp.def)
	    inform (exp.def->line, "%<#define %s%>",
		    cpp_macro_definition (reader, node, exp.def));
	}
      def = NULL;
    }

  defs.release ();

  dump.pop (n);

  return def;
}
| 20329 | |
| 20330 | /* Stream the static aggregates. Sadly some headers (ahem: |
| 20331 | iostream) contain static vars, and rely on them to run global |
| 20332 | ctors. */ |
unsigned
module_state::write_inits (elf_out *to, depset::hash &table, unsigned *crc_ptr)
{
  if (!static_aggregates && !tls_aggregates)
    return 0;

  dump () && dump ("Writing initializers");
  dump.indent ();

  /* Reverse the lists before streaming -- presumably they were
     accumulated newest-first; confirm against their producers.  */
  static_aggregates = nreverse (static_aggregates);
  tls_aggregates = nreverse (tls_aggregates);

  unsigned count = 0;
  trees_out sec (to, this, table, ~0u);
  sec.begin ();

  /* Pass 0 walks static_aggregates, pass 1 walks tls_aggregates.  */
  tree list = static_aggregates;
  for (int passes = 0; passes != 2; passes++)
    {
      for (tree init = list; init; init = TREE_CHAIN (init))
	if (TREE_LANG_FLAG_0 (init))
	  {
	    if (STATIC_INIT_DECOMP_BASE_P (init))
	      {
		/* Ensure that in the returned result chain if the
		   STATIC_INIT_DECOMP_*BASE_P flags are set, there is
		   always one or more STATIC_INIT_DECOMP_BASE_P TREE_LIST
		   followed by one or more STATIC_INIT_DECOMP_NONBASE_P.  */
		int phase = 0;
		tree last = NULL_TREE;
		for (tree init2 = TREE_CHAIN (init);
		     init2; init2 = TREE_CHAIN (init2))
		  {
		    if (phase == 0 && STATIC_INIT_DECOMP_BASE_P (init2))
		      ;
		    else if (phase == 0
			     && STATIC_INIT_DECOMP_NONBASE_P (init2))
		      {
			phase = TREE_LANG_FLAG_0 (init2) ? 2 : 1;
			last = init2;
		      }
		    else if (IN_RANGE (phase, 1, 2)
			     && STATIC_INIT_DECOMP_NONBASE_P (init2))
		      {
			if (TREE_LANG_FLAG_0 (init2))
			  phase = 2;
			last = init2;
		      }
		    else
		      break;
		  }
		if (phase == 2)
		  {
		    /* In that case, add markers about it so that the
		       STATIC_INIT_DECOMP_BASE_P and
		       STATIC_INIT_DECOMP_NONBASE_P flags can be restored.
		       Marker values match read_inits: 2*pass+1 starts
		       the BASE_P run, 2*pass+2 the NONBASE_P run.  */
		    sec.tree_node (t: build_int_cst (integer_type_node,
						2 * passes + 1));
		    phase = 1;
		    for (tree init2 = init; init2 != TREE_CHAIN (last);
			 init2 = TREE_CHAIN (init2))
		      if (TREE_LANG_FLAG_0 (init2))
			{
			  tree decl = TREE_VALUE (init2);
			  if (phase == 1
			      && STATIC_INIT_DECOMP_NONBASE_P (init2))
			    {
			      sec.tree_node (t: build_int_cst (integer_type_node,
							  2 * passes + 2));
			      phase = 2;
			    }
			  dump ("Initializer:%u for %N", count, decl);
			  sec.tree_node (t: decl);
			  ++count;
			}
		    /* A zero marker terminates the decomposition run.  */
		    sec.tree_node (integer_zero_node);
		    init = last;
		    continue;
		  }
	      }

	    tree decl = TREE_VALUE (init);

	    dump ("Initializer:%u for %N", count, decl);
	    sec.tree_node (t: decl);
	    ++count;
	  }

      list = tls_aggregates;
    }

  sec.end (sink: to, name: to->name (MOD_SNAME_PFX ".ini"), crc_ptr);
  dump.outdent ();

  return count;
}
| 20429 | |
| 20430 | /* We have to defer some post-load processing until we've completed |
| 20431 | reading, because they can cause more reading. */ |
| 20432 | |
| 20433 | static void |
| 20434 | post_load_processing () |
| 20435 | { |
| 20436 | /* We mustn't cause a GC, our caller should have arranged for that |
| 20437 | not to happen. */ |
| 20438 | gcc_checking_assert (function_depth); |
| 20439 | |
| 20440 | if (!post_load_decls) |
| 20441 | return; |
| 20442 | |
| 20443 | tree old_cfd = current_function_decl; |
| 20444 | struct function *old_cfun = cfun; |
| 20445 | while (post_load_decls->length ()) |
| 20446 | { |
| 20447 | tree decl = post_load_decls->pop (); |
| 20448 | |
| 20449 | dump () && dump ("Post-load processing of %N" , decl); |
| 20450 | |
| 20451 | gcc_checking_assert (DECL_MAYBE_IN_CHARGE_CDTOR_P (decl)); |
| 20452 | expand_or_defer_fn (decl); |
| 20453 | /* As in module_state::read_cluster. */ |
| 20454 | if (at_eof && DECL_COMDAT (decl) && DECL_EXTERNAL (decl) |
| 20455 | && DECL_NOT_REALLY_EXTERN (decl)) |
| 20456 | DECL_EXTERNAL (decl) = false; |
| 20457 | } |
| 20458 | |
| 20459 | set_cfun (new_cfun: old_cfun); |
| 20460 | current_function_decl = old_cfd; |
| 20461 | } |
| 20462 | |
/* Read COUNT initializer decls written by write_inits, restoring the
   STATIC_INIT_DECOMP_{,NON}BASE_P flags from the interleaved
   INTEGER_CST markers.  */

bool
module_state::read_inits (unsigned count)
{
  trees_in sec (this);
  if (!sec.begin (loc, from (), from ()->find (MOD_SNAME_PFX ".ini")))
    return false;
  dump () && dump ("Reading %u initializers", count);
  dump.indent ();

  lazy_snum = ~0u;
  int decomp_phase = 0;
  tree *aggrp = NULL;
  for (unsigned ix = 0; ix != count; ix++)
    {
      tree last = NULL_TREE;
      if (decomp_phase)
	last = *aggrp;
      /* Merely referencing the decl causes its initializer to be read
	 and added to the correct list.  */
      tree decl = sec.tree_node ();
      /* module_state::write_inits can add special INTEGER_CST markers in
	 between the decls.  1 means STATIC_INIT_DECOMP_BASE_P entries
	 follow in static_aggregates, 2 means STATIC_INIT_DECOMP_NONBASE_P
	 entries follow in static_aggregates, 3 means
	 STATIC_INIT_DECOMP_BASE_P entries follow in tls_aggregates,
	 4 means STATIC_INIT_DECOMP_NONBASE_P follow in tls_aggregates,
	 0 means end of STATIC_INIT_DECOMP_{,NON}BASE_P sequence.  */
      if (tree_fits_shwi_p (decl))
	{
	  if (sec.get_overrun ())
	    break;
	  decomp_phase = tree_to_shwi (decl);
	  if (decomp_phase)
	    {
	      aggrp = decomp_phase > 2 ? &tls_aggregates : &static_aggregates;
	      last = *aggrp;
	    }
	  /* The marker is followed by the actual decl.  */
	  decl = sec.tree_node ();
	}

      if (sec.get_overrun ())
	break;
      if (decl)
	dump ("Initializer:%u for %N", ix, decl);
      if (decomp_phase)
	{
	  /* Reading DECL must have pushed exactly one TREE_LIST onto
	     the current aggregates list; restore its flag.  */
	  tree init = *aggrp;
	  gcc_assert (TREE_VALUE (init) == decl && TREE_CHAIN (init) == last);
	  if ((decomp_phase & 1) != 0)
	    STATIC_INIT_DECOMP_BASE_P (init) = 1;
	  else
	    STATIC_INIT_DECOMP_NONBASE_P (init) = 1;
	}
    }
  if (decomp_phase && !sec.get_overrun ())
    {
      /* Consume the zero marker terminating the final run.  */
      tree decl = sec.tree_node ();
      gcc_assert (integer_zerop (decl));
    }
  lazy_snum = 0;
  post_load_processing ();
  dump.outdent ();
  if (!sec.end (src: from ()))
    return false;
  return true;
}
| 20529 | |
| 20530 | void |
| 20531 | module_state::write_counts (elf_out *to, unsigned counts[MSC_HWM], |
| 20532 | unsigned *crc_ptr) |
| 20533 | { |
| 20534 | bytes_out cfg (to); |
| 20535 | |
| 20536 | cfg.begin (); |
| 20537 | |
| 20538 | for (unsigned ix = MSC_HWM; ix--;) |
| 20539 | cfg.u (v: counts[ix]); |
| 20540 | |
| 20541 | if (dump ()) |
| 20542 | { |
| 20543 | dump ("Cluster sections are [%u,%u)" , |
| 20544 | counts[MSC_sec_lwm], counts[MSC_sec_hwm]); |
| 20545 | dump ("Bindings %u" , counts[MSC_bindings]); |
| 20546 | dump ("Pendings %u" , counts[MSC_pendings]); |
| 20547 | dump ("Entities %u" , counts[MSC_entities]); |
| 20548 | dump ("Namespaces %u" , counts[MSC_namespaces]); |
| 20549 | dump ("Using-directives %u" , counts[MSC_using_directives]); |
| 20550 | dump ("Macros %u" , counts[MSC_macros]); |
| 20551 | dump ("Initializers %u" , counts[MSC_inits]); |
| 20552 | } |
| 20553 | |
| 20554 | cfg.end (sink: to, name: to->name (MOD_SNAME_PFX ".cnt" ), crc_ptr); |
| 20555 | } |
| 20556 | |
| 20557 | bool |
| 20558 | module_state::read_counts (unsigned counts[MSC_HWM]) |
| 20559 | { |
| 20560 | bytes_in cfg; |
| 20561 | |
| 20562 | if (!cfg.begin (loc, source: from (), MOD_SNAME_PFX ".cnt" )) |
| 20563 | return false; |
| 20564 | |
| 20565 | for (unsigned ix = MSC_HWM; ix--;) |
| 20566 | counts[ix] = cfg.u (); |
| 20567 | |
| 20568 | if (dump ()) |
| 20569 | { |
| 20570 | dump ("Declaration sections are [%u,%u)" , |
| 20571 | counts[MSC_sec_lwm], counts[MSC_sec_hwm]); |
| 20572 | dump ("Bindings %u" , counts[MSC_bindings]); |
| 20573 | dump ("Pendings %u" , counts[MSC_pendings]); |
| 20574 | dump ("Entities %u" , counts[MSC_entities]); |
| 20575 | dump ("Namespaces %u" , counts[MSC_namespaces]); |
| 20576 | dump ("Using-directives %u" , counts[MSC_using_directives]); |
| 20577 | dump ("Macros %u" , counts[MSC_macros]); |
| 20578 | dump ("Initializers %u" , counts[MSC_inits]); |
| 20579 | } |
| 20580 | |
| 20581 | return cfg.end (src: from ()); |
| 20582 | } |
| 20583 | |
| 20584 | /* Tool configuration: MOD_SNAME_PFX .config |
| 20585 | |
| 20586 | This is data that confirms current state (or fails). */ |
| 20587 | |
void
module_state::write_config (elf_out *to, module_state_config &config,
			    unsigned inner_crc)
{
  bytes_out cfg (to);

  cfg.begin ();

  /* Write version and inner crc as u32 values, for easier
     debug inspection.  */
  dump () && dump ("Writing version=%V, inner_crc=%x",
		   MODULE_VERSION, inner_crc);
  cfg.u32 (val: unsigned (MODULE_VERSION));
  cfg.u32 (val: inner_crc);

  /* Module name -- empty for a header unit.  */
  cfg.u (v: to->name (literal: is_header () ? "" : get_flatname ()));

  /* Configuration.  */
  dump () && dump ("Writing target='%s', host='%s'",
		   TARGET_MACHINE, HOST_MACHINE);
  unsigned target = to->name (TARGET_MACHINE);
  /* Share the string table entry when host and target match.  */
  unsigned host = (!strcmp (TARGET_MACHINE, HOST_MACHINE)
		   ? target : to->name (HOST_MACHINE));
  cfg.u (v: target);
  cfg.u (v: host);

  cfg.str (ptr: config.dialect_str);
  cfg.u (v: extensions);

  /* Global tree information.  We write the globals crc separately,
     rather than mix it directly into the overall crc, as it is used
     to ensure data match between instances of the compiler, not
     integrity of the file.  */
  dump () && dump ("Writing globals=%u, crc=%x",
		   fixed_trees->length (), global_crc);
  cfg.u (v: fixed_trees->length ());
  cfg.u32 (val: global_crc);

  if (is_partition ())
    cfg.u (v: is_interface ());

  cfg.u (v: config.num_imports);
  cfg.u (v: config.num_partitions);
  cfg.u (v: config.num_entities);

  cfg.loc (l: config.ordinary_locs);
  cfg.loc (l: config.macro_locs);
  cfg.u (v: config.loc_range_bits);

  cfg.u (v: config.active_init);

  /* Now generate CRC, we'll have incorporated the inner CRC because
     of its serialization above.  */
  cfg.end (sink: to, name: to->name (MOD_SNAME_PFX ".cfg"), crc_ptr: &crc);
  dump () && dump ("Writing CRC=%x", crc);
}
| 20644 | |
| 20645 | void |
| 20646 | module_state::note_cmi_name () |
| 20647 | { |
| 20648 | if (!cmi_noted_p && filename) |
| 20649 | { |
| 20650 | cmi_noted_p = true; |
| 20651 | inform (loc, "compiled module file is %qs" , |
| 20652 | maybe_add_cmi_prefix (to: filename)); |
| 20653 | } |
| 20654 | } |
| 20655 | |
| 20656 | bool |
| 20657 | module_state::read_config (module_state_config &config, bool complain) |
| 20658 | { |
| 20659 | bytes_in cfg; |
| 20660 | |
| 20661 | if (!cfg.begin (loc, source: from (), MOD_SNAME_PFX ".cfg" )) |
| 20662 | return false; |
| 20663 | |
| 20664 | /* Check version. */ |
| 20665 | unsigned my_ver = MODULE_VERSION; |
| 20666 | unsigned their_ver = cfg.u32 (); |
| 20667 | dump () && dump (my_ver == their_ver ? "Version %V" |
| 20668 | : "Expecting %V found %V" , my_ver, their_ver); |
| 20669 | if (their_ver != my_ver) |
| 20670 | { |
| 20671 | /* The compiler versions differ. Close enough? */ |
| 20672 | verstr_t my_string, their_string; |
| 20673 | |
| 20674 | version2string (version: my_ver, out&: my_string); |
| 20675 | version2string (version: their_ver, out&: their_string); |
| 20676 | |
| 20677 | /* Reject when either is non-experimental or when experimental |
| 20678 | major versions differ. */ |
| 20679 | auto_diagnostic_group d; |
| 20680 | bool reject_p = ((!IS_EXPERIMENTAL (my_ver) |
| 20681 | || !IS_EXPERIMENTAL (their_ver) |
| 20682 | || MODULE_MAJOR (my_ver) != MODULE_MAJOR (their_ver)) |
| 20683 | /* The 'I know what I'm doing' switch. */ |
| 20684 | && !flag_module_version_ignore); |
| 20685 | bool inform_p = true; |
| 20686 | if (!complain) |
| 20687 | inform_p = false; |
| 20688 | else if (reject_p) |
| 20689 | { |
| 20690 | cfg.set_overrun (); |
| 20691 | error_at (loc, "compiled module is %sversion %s" , |
| 20692 | IS_EXPERIMENTAL (their_ver) ? "experimental " : "" , |
| 20693 | their_string); |
| 20694 | } |
| 20695 | else |
| 20696 | inform_p = warning_at (loc, 0, "compiled module is %sversion %s" , |
| 20697 | IS_EXPERIMENTAL (their_ver) ? "experimental " : "" , |
| 20698 | their_string); |
| 20699 | |
| 20700 | if (inform_p) |
| 20701 | { |
| 20702 | inform (loc, "compiler is %sversion %s%s%s" , |
| 20703 | IS_EXPERIMENTAL (my_ver) ? "experimental " : "" , |
| 20704 | my_string, |
| 20705 | reject_p ? "" : flag_module_version_ignore |
| 20706 | ? ", be it on your own head!" : ", close enough?" , |
| 20707 | reject_p ? "" : " \xc2\xaf\\_(\xe3\x83\x84)_/\xc2\xaf" ); |
| 20708 | note_cmi_name (); |
| 20709 | } |
| 20710 | |
| 20711 | if (reject_p) |
| 20712 | goto done; |
| 20713 | } |
| 20714 | |
| 20715 | /* We wrote the inner crc merely to merge it, so simply read it |
| 20716 | back and forget it. */ |
| 20717 | cfg.u32 (); |
| 20718 | |
| 20719 | /* Check module name. */ |
| 20720 | { |
| 20721 | const char *their_name = from ()->name (offset: cfg.u ()); |
| 20722 | const char *our_name = "" ; |
| 20723 | |
| 20724 | if (!is_header ()) |
| 20725 | our_name = get_flatname (); |
| 20726 | |
| 20727 | /* Header units can be aliased, so name checking is |
| 20728 | inappropriate. */ |
| 20729 | if (0 != strcmp (s1: their_name, s2: our_name)) |
| 20730 | { |
| 20731 | error_at (loc, |
| 20732 | their_name[0] && our_name[0] ? G_("module %qs found" ) |
| 20733 | : their_name[0] |
| 20734 | ? G_("header module expected, module %qs found" ) |
| 20735 | : G_("module %qs expected, header module found" ), |
| 20736 | their_name[0] ? their_name : our_name); |
| 20737 | cfg.set_overrun (); |
| 20738 | goto done; |
| 20739 | } |
| 20740 | } |
| 20741 | |
| 20742 | /* Check the CRC after the above sanity checks, so that the user is |
| 20743 | clued in. */ |
| 20744 | { |
| 20745 | unsigned e_crc = crc; |
| 20746 | crc = cfg.get_crc (); |
| 20747 | dump () && dump ("Reading CRC=%x" , crc); |
| 20748 | /* When not complaining we haven't set directness yet, so ignore the |
| 20749 | mismatch. */ |
| 20750 | if (complain && !is_direct () && crc != e_crc) |
| 20751 | { |
| 20752 | error_at (loc, "module %qs CRC mismatch" , get_flatname ()); |
| 20753 | cfg.set_overrun (); |
| 20754 | goto done; |
| 20755 | } |
| 20756 | } |
| 20757 | |
| 20758 | /* Check target & host. */ |
| 20759 | { |
| 20760 | const char *their_target = from ()->name (offset: cfg.u ()); |
| 20761 | const char *their_host = from ()->name (offset: cfg.u ()); |
| 20762 | dump () && dump ("Read target='%s', host='%s'" , their_target, their_host); |
| 20763 | if (strcmp (their_target, TARGET_MACHINE) |
| 20764 | || strcmp (their_host, HOST_MACHINE)) |
| 20765 | { |
| 20766 | error_at (loc, "target & host is %qs:%qs, expected %qs:%qs" , |
| 20767 | their_target, TARGET_MACHINE, their_host, HOST_MACHINE); |
| 20768 | cfg.set_overrun (); |
| 20769 | goto done; |
| 20770 | } |
| 20771 | } |
| 20772 | |
| 20773 | /* Check compilation dialect. This must match. */ |
| 20774 | { |
| 20775 | const char *their_dialect = cfg.str (); |
| 20776 | if (strcmp (s1: their_dialect, s2: config.dialect_str)) |
| 20777 | { |
| 20778 | if (complain) |
| 20779 | error_at (loc, "language dialect differs %qs, expected %qs" , |
| 20780 | their_dialect, config.dialect_str); |
| 20781 | cfg.set_overrun (); |
| 20782 | goto done; |
| 20783 | } |
| 20784 | } |
| 20785 | |
| 20786 | /* Check for extensions. If they set any, we must have them set |
| 20787 | too. */ |
| 20788 | { |
| 20789 | unsigned ext = cfg.u (); |
| 20790 | unsigned allowed = (flag_openmp ? SE_OPENMP | SE_OPENMP_SIMD : 0); |
| 20791 | if (flag_openmp_simd) |
| 20792 | allowed |= SE_OPENMP_SIMD; |
| 20793 | if (flag_openacc) |
| 20794 | allowed |= SE_OPENACC; |
| 20795 | |
| 20796 | if (unsigned bad = ext & ~allowed) |
| 20797 | { |
| 20798 | if (bad & SE_OPENMP) |
| 20799 | error_at (loc, "module contains OpenMP, use %<-fopenmp%> to enable" ); |
| 20800 | else if (bad & SE_OPENMP_SIMD) |
| 20801 | error_at (loc, "module contains OpenMP, use %<-fopenmp%> or " |
| 20802 | "%<-fopenmp-simd%> to enable" ); |
| 20803 | if (bad & SE_OPENACC) |
| 20804 | error_at (loc, "module contains OpenACC, use %<-fopenacc%> to " |
| 20805 | "enable" ); |
| 20806 | cfg.set_overrun (); |
| 20807 | goto done; |
| 20808 | } |
| 20809 | extensions = ext; |
| 20810 | } |
| 20811 | |
| 20812 | /* Check global trees. */ |
| 20813 | { |
| 20814 | unsigned their_fixed_length = cfg.u (); |
| 20815 | unsigned their_fixed_crc = cfg.u32 (); |
| 20816 | dump () && dump ("Read globals=%u, crc=%x" , |
| 20817 | their_fixed_length, their_fixed_crc); |
| 20818 | if (!flag_preprocess_only |
| 20819 | && (their_fixed_length != fixed_trees->length () |
| 20820 | || their_fixed_crc != global_crc)) |
| 20821 | { |
| 20822 | error_at (loc, "fixed tree mismatch" ); |
| 20823 | cfg.set_overrun (); |
| 20824 | goto done; |
| 20825 | } |
| 20826 | } |
| 20827 | |
| 20828 | /* All non-partitions are interfaces. */ |
| 20829 | interface_p = !is_partition () || cfg.u (); |
| 20830 | |
| 20831 | config.num_imports = cfg.u (); |
| 20832 | config.num_partitions = cfg.u (); |
| 20833 | config.num_entities = cfg.u (); |
| 20834 | |
| 20835 | config.ordinary_locs = cfg.loc (); |
| 20836 | config.macro_locs = cfg.loc (); |
| 20837 | config.loc_range_bits = cfg.u (); |
| 20838 | |
| 20839 | config.active_init = cfg.u (); |
| 20840 | |
| 20841 | done: |
| 20842 | return cfg.end (src: from ()); |
| 20843 | } |
| 20844 | |
| 20845 | /* Comparator for ordering the Ordered Ordinary Location array. */ |
| 20846 | |
| 20847 | static int |
| 20848 | ool_cmp (const void *a_, const void *b_) |
| 20849 | { |
| 20850 | auto *a = *static_cast<const module_state *const *> (a_); |
| 20851 | auto *b = *static_cast<const module_state *const *> (b_); |
| 20852 | if (a == b) |
| 20853 | return 0; |
| 20854 | else if (a->ordinary_locs.first < b->ordinary_locs.first) |
| 20855 | return -1; |
| 20856 | else |
| 20857 | return +1; |
| 20858 | } |
| 20859 | |
| 20860 | /* Use ELROND format to record the following sections: |
| 20861 | qualified-names : binding value(s) |
| 20862 | MOD_SNAME_PFX.README : human readable, strings |
| 20863 | MOD_SNAME_PFX.ENV : environment strings, strings |
| 20864 | MOD_SNAME_PFX.nms : namespace hierarchy |
| 20865 | MOD_SNAME_PFX.udi : namespace using-directives |
| 20866 | MOD_SNAME_PFX.bnd : binding table |
| 20867 | MOD_SNAME_PFX.spc : specialization table |
| 20868 | MOD_SNAME_PFX.imp : import table |
| 20869 | MOD_SNAME_PFX.ent : entity table |
| 20870 | MOD_SNAME_PFX.prt : partitions table |
| 20871 | MOD_SNAME_PFX.olm : ordinary line maps |
| 20872 | MOD_SNAME_PFX.mlm : macro line maps |
| 20873 | MOD_SNAME_PFX.def : macro definitions |
| 20874 | MOD_SNAME_PFX.mac : macro index |
| 20875 | MOD_SNAME_PFX.ini : inits |
| 20876 | MOD_SNAME_PFX.cnt : counts |
| 20877 | MOD_SNAME_PFX.cfg : config data |
| 20878 | */ |
| 20879 | |
/* Stage one of writing the module to TO: compute the dependency
   table, partition it into strongly-connected clusters, and stream
   every section except the config (write_end emits that once dynamic
   initializers are known).  CONFIG accumulates the counts write_end
   will record; CRC accumulates the running checksum.  Returns false
   on (already diagnosed) failure.  */

bool
module_state::write_begin (elf_out *to, cpp_reader *reader,
			   module_state_config &config, unsigned &crc)
{
  /* Figure out remapped module numbers, which might elide
     partitions.  */
  bitmap partitions = NULL;
  if (!is_header () && !is_partition ())
    partitions = BITMAP_GGC_ALLOC ();
  write_init_maps ();

  /* Remap slot 0 is ours; imports are renumbered densely from 1,
     skipping elided partitions.  */
  unsigned mod_hwm = 1;
  for (unsigned ix = 1; ix != modules->length (); ix++)
    {
      module_state *imp = (*modules)[ix];

      /* Promote any non-partition direct import from a partition, unless
	 we're a partition.  */
      if (!is_partition () && !imp->is_partition ()
	  && imp->is_partition_direct ())
	imp->directness = MD_PURVIEW_DIRECT;

      /* Write any import that is not a partition, unless we're a
	 partition.  */
      if (!partitions || !imp->is_partition ())
	imp->remap = mod_hwm++;
      else
	{
	  dump () && dump ("Partition %M %u" , imp, ix);
	  bitmap_set_bit (partitions, ix);
	  imp->remap = 0;
	  /* All interface partitions must be exported.  */
	  if (imp->is_interface () && !bitmap_bit_p (exports, imp->mod))
	    {
	      error_at (imp->loc, "interface partition is not exported" );
	      /* Set the bit anyway, to avoid cascading errors.  */
	      bitmap_set_bit (exports, imp->mod);
	    }

	  /* All the partition entities should have been loaded when
	     loading the partition.  */
	  if (CHECKING_P)
	    for (unsigned jx = 0; jx != imp->entity_num; jx++)
	      {
		binding_slot *slot = &(*entity_ary)[imp->entity_lwm + jx];
		gcc_checking_assert (!slot->is_lazy ());
	      }
	}

      if (imp->is_direct () && (imp->remap || imp->is_partition ()))
	note_location (loc: imp->imported_from ());
    }

  if (partitions && bitmap_empty_p (map: partitions))
    /* No partitions present.  */
    partitions = nullptr;

  /* Find the set of decls we must write out.  */
  depset::hash table (DECL_NAMESPACE_BINDINGS (global_namespace)->size () * 8);
  /* Add the specializations before the writables, so that we can
     detect injected friend specializations.  */
  table.add_specializations (decl_p: true);
  table.add_specializations (decl_p: false);
  if (partial_specializations)
    {
      table.add_partial_entities (partial_classes: partial_specializations);
      partial_specializations = NULL;
    }
  table.add_namespace_entities (global_namespace, partitions);
  if (class_members)
    {
      table.add_class_entities (class_members);
      class_members = NULL;
    }

  /* Now join everything up.  */
  table.find_dependencies (module: this);

  if (!table.finalize_dependencies ())
    return false;

#if CHECKING_P
  /* We're done verifying at-most once reading, reset to verify
     at-most once writing.  */
  note_defs = note_defs_table_t::create_ggc (n: 1000);
#endif

  /* Determine Strongly Connected Components.  This will also strip any
     unnecessary dependencies on imported or TU-local entities.  */
  vec<depset *> sccs = table.connect ();

  /* Build the Ordered Ordinary Location array of loaded, non-elided
     imports, sorted by location range (see ool_cmp).  */
  vec_alloc (v&: ool, nelems: modules->length ());
  for (unsigned ix = modules->length (); --ix;)
    {
      auto *import = (*modules)[ix];
      if (import->loadedness > ML_NONE
	  && !(partitions && bitmap_bit_p (partitions, import->mod)))
	ool->quick_push (obj: import);
    }
  ool->qsort (ool_cmp);

  write_diagnostic_classification (to: nullptr, dc: global_dc, crc_p: nullptr);

  vec<cpp_hashnode *> *macros = nullptr;
  if (is_header ())
    macros = prepare_macros (reader);

  config.num_imports = mod_hwm;
  config.num_partitions = modules->length () - mod_hwm;
  auto map_info = write_prepare_maps (cfg: &config, has_partitions: bool (config.num_partitions));
  unsigned counts[MSC_HWM];
  memset (s: counts, c: 0, n: sizeof (counts));

  /* depset::cluster is the cluster number,
     depset::section is unspecified scratch value.

     The following loops make use of the tarjan property that
     dependencies will be earlier in the SCCS array.  */

  /* This first loop determines the number of depsets in each SCC, and
     also the number of namespaces we're dealing with.  During the
     loop, the meaning of a couple of depset fields now change:

     depset::cluster -> size_of cluster, if first of cluster & !namespace
     depset::section -> section number of cluster (if !namespace).  */

  unsigned n_spaces = 0;
  counts[MSC_sec_lwm] = counts[MSC_sec_hwm] = to->get_section_limit ();
  for (unsigned size, ix = 0; ix < sccs.length (); ix += size)
    {
      depset **base = &sccs[ix];

      if (base[0]->get_entity_kind () == depset::EK_NAMESPACE)
	{
	  /* Namespaces are always singleton clusters.  */
	  n_spaces++;
	  size = 1;
	}
      else
	{
	  /* Count the members in this cluster.  */
	  for (size = 1; ix + size < sccs.length (); size++)
	    if (base[size]->cluster != base[0]->cluster)
	      break;

	  for (unsigned jx = 0; jx != size; jx++)
	    {
	      /* Set the section number.  */
	      base[jx]->cluster = ~(~0u >> 1); /* A bad value.  */
	      base[jx]->section = counts[MSC_sec_hwm];
	    }

	  /* Save the size in the first member's cluster slot.  */
	  base[0]->cluster = size;

	  counts[MSC_sec_hwm]++;
	}
    }

  /* Write the clusters.  Namespace decls are put in the spaces array.
     The meaning of depset::cluster changes to provide the
     unnamed-decl count of the depset's decl (and remains zero for
     non-decls and non-unnamed).  */
  unsigned bytes = 0;
  vec<depset *> spaces;
  spaces.create (nelems: n_spaces);

  for (unsigned size, ix = 0; ix < sccs.length (); ix += size)
    {
      depset **base = &sccs[ix];

      if (base[0]->get_entity_kind () == depset::EK_NAMESPACE)
	{
	  tree decl = base[0]->get_entity ();
	  if (decl == global_namespace)
	    base[0]->cluster = 0;
	  else if (!base[0]->is_import ())
	    {
	      base[0]->cluster = counts[MSC_entities]++;
	      spaces.quick_push (obj: base[0]);
	      counts[MSC_namespaces]++;
	      if (CHECKING_P)
		{
		  /* Add it to the entity map, such that we can tell it is
		     part of us.  */
		  bool existed;
		  unsigned *slot = &entity_map->get_or_insert
		    (DECL_UID (decl), existed: &existed);
		  if (existed)
		    /* It must have come from a partition.  */
		    gcc_checking_assert
		      (import_entity_module (*slot)->is_partition ());
		  *slot = ~base[0]->cluster;
		}
	      dump (dumper::CLUSTER) && dump ("Cluster namespace %N" , decl);
	    }
	  size = 1;
	}
      else
	{
	  size = base[0]->cluster;

	  /* Cluster is now used to number entities.  */
	  base[0]->cluster = ~(~0u >> 1); /* A bad value.  */

	  sort_cluster (original: &table, scc: base, size);

	  /* Record the section for consistency checking during stream
	     out -- we don't want to start writing decls in different
	     sections.  */
	  table.section = base[0]->section;
	  bytes += write_cluster (to, scc: base, size, table, counts, crc_ptr: &crc);
	  table.section = 0;
	}
    }

  /* depset::cluster - entity number (on entities)
     depset::section - cluster number  */
  /* We'd better have written as many sections and found as many
     namespaces as we predicted.  */
  gcc_assert (counts[MSC_sec_hwm] == to->get_section_limit ()
	      && spaces.length () == counts[MSC_namespaces]);

  /* Write the entities.  None happens if we contain namespaces or
     nothing.  */
  config.num_entities = counts[MSC_entities];
  if (counts[MSC_entities])
    write_entities (to, depsets: sccs, count: counts[MSC_entities], crc_p: &crc);

  /* Write the namespaces.  */
  if (counts[MSC_namespaces])
    write_namespaces (to, spaces, num: counts[MSC_namespaces], crc_p: &crc);

  /* Write any using-directives.  */
  if (counts[MSC_namespaces])
    counts[MSC_using_directives]
      = write_using_directives (to, table, spaces, crc_p: &crc);

  /* Write the bindings themselves.  */
  counts[MSC_bindings] = write_bindings (to, sccs, crc_p: &crc);

  /* Write the unnamed.  */
  counts[MSC_pendings] = write_pendings (to, depsets: sccs, table, crc_p: &crc);

  /* Write the import table.  */
  if (config.num_imports > 1)
    write_imports (to, crc_ptr: &crc);

  /* Write elided partition table.  */
  if (config.num_partitions)
    write_partitions (to, count: config.num_partitions, crc_ptr: &crc);

  /* Write the line maps.  */
  if (config.ordinary_locs)
    {
      write_ordinary_maps (to, info&: map_info, has_partitions: bool (config.num_partitions), crc_p: &crc);
      write_diagnostic_classification (to, dc: global_dc, crc_p: &crc);
    }
  if (config.macro_locs)
    write_macro_maps (to, info&: map_info, crc_p: &crc);

  /* Header units additionally stream their macros and initializers.  */
  if (is_header ())
    {
      counts[MSC_macros] = write_macros (to, macros, crc_p: &crc);
      counts[MSC_inits] = write_inits (to, table, crc_ptr: &crc);
      vec_free (v&: macros);
    }

  unsigned clusters = counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
  dump () && dump ("Wrote %u clusters, average %u bytes/cluster" ,
		   clusters, (bytes + clusters / 2) / (clusters + !clusters));
  trees_out::instrument ();

  write_counts (to, counts, crc_ptr: &crc);

  spaces.release ();
  sccs.release ();

  vec_free (v&: macro_loc_remap);
  vec_free (v&: ord_loc_remap);
  vec_free (v&: ool);

  // FIXME:QOI: Have a command line switch to control more detailed
  // information (which might leak data you do not want to leak).
  // Perhaps (some of) the write_readme contents should also be
  // so-controlled.
  if (false)
    write_env (to);

  return true;
}
| 21169 | |
| 21170 | // Finish module writing after we've emitted all dynamic initializers. |
| 21171 | |
void
module_state::write_end (elf_out *to, cpp_reader *reader,
			 module_state_config &config, unsigned &crc)
{
  /* And finish up.  The config goes last: its CRC incorporates the
     accumulated inner CRC from write_begin.  */
  write_config (to, config, inner_crc: crc);

  /* Human-readable info.  */
  write_readme (to, reader, dialect: config.dialect_str);

  dump () && dump ("Wrote %u sections" , to->get_section_limit ());
}
| 21184 | |
| 21185 | /* Initial read of a CMI. Checks config, loads up imports and line |
| 21186 | maps. */ |
| 21187 | |
bool
module_state::read_initial (cpp_reader *reader)
{
  module_state_config config;
  bool ok = true;

  /* Validate and load the .cfg section (counts, location ranges).  */
  if (ok && !read_config (config))
    ok = false;

  bool have_locs = ok && read_prepare_maps (cfg: &config);

  /* Ordinary maps before the imports.  */
  if (!(have_locs && config.ordinary_locs))
    ordinary_locs.first = line_table->highest_location + 1;
  else if (!read_ordinary_maps (num_ord_locs: config.ordinary_locs, range_bits: config.loc_range_bits))
    ok = false;

  if (ok && have_locs && config.ordinary_locs
      && !read_diagnostic_classification (dc: global_dc))
    ok = false;

  /* Allocate the REMAP vector.  */
  slurp->alloc_remap (size: config.num_imports);

  if (ok)
    {
      /* Read the import table.  Decrement current to stop this CMI
	 from being evicted during the import.  */
      slurp->current--;
      if (config.num_imports > 1 && !read_imports (reader, lmaps: line_table))
	ok = false;
      slurp->current++;
    }

  /* Read the elided partition table, if we're the primary partition.  */
  if (ok && config.num_partitions && is_module ()
      && !read_partitions (count: config.num_partitions))
    ok = false;

  /* Determine the module's number.  */
  gcc_checking_assert (mod == MODULE_UNKNOWN);
  gcc_checking_assert (this != this_module ());

  {
    /* Allocate space in the entities array now -- that array must be
       monotonically in step with the modules array.  */
    entity_lwm = vec_safe_length (v: entity_ary);
    entity_num = config.num_entities;
    gcc_checking_assert (modules->length () == 1
			 || modules->last ()->entity_lwm <= entity_lwm);
    vec_safe_reserve (v&: entity_ary, nelems: config.num_entities);

    /* Fill our range with empty slots; they are lazily populated
       later from the entity table.  */
    binding_slot slot;
    slot.u.binding = NULL_TREE;
    for (unsigned count = config.num_entities; count--;)
      entity_ary->quick_push (obj: slot);
  }

  /* We'll run out of other resources before we run out of module
     indices.  */
  mod = modules->length ();
  vec_safe_push (v&: modules, obj: this);

  /* We always import and export ourselves.  */
  bitmap_set_bit (imports, mod);
  bitmap_set_bit (exports, mod);

  if (ok)
    /* Remap slot 0 is ourself; the low bit is clear here (not a
       direct import of ourself).  */
    (*slurp->remap)[0] = mod << 1;
  dump () && dump ("Assigning %M module number %u" , this, mod);

  /* We should not have been frozen during the importing done by
     read_config.  */
  gcc_assert (!from ()->is_frozen ());

  /* Macro maps after the imports.  */
  if (!(ok && have_locs && config.macro_locs))
    macro_locs.first = LINEMAPS_MACRO_LOWEST_LOCATION (set: line_table);
  else if (!read_macro_maps (num_macro_locs: config.macro_locs))
    ok = false;

  /* Note whether there's an active initializer.  */
  active_init_p = !is_header () && bool (config.active_init);

  gcc_assert (slurp->current == ~0u);
  return ok;
}
| 21275 | |
| 21276 | /* Read a preprocessor state. */ |
| 21277 | |
bool
module_state::read_preprocessor (bool outermost)
{
  gcc_checking_assert (is_header () && slurp
		       && slurp->remap_module (0) == mod);

  /* Already at (or past) preprocessor state: just report health.  */
  if (loadedness == ML_PREPROCESSOR)
    return !(from () && from ()->get_error ());

  bool ok = true;

  /* Read direct header imports.  Entries with the low bit set are
     direct imports; the remaining bits hold the module number.  */
  unsigned len = slurp->remap->length ();
  for (unsigned ix = 1; ok && ix != len; ix++)
    {
      unsigned map = (*slurp->remap)[ix];
      if (map & 1)
	{
	  module_state *import = (*modules)[map >> 1];
	  if (import->is_header ())
	    {
	      /* Recurse (non-outermost), then accumulate the
		 transitive header set.  */
	      ok = import->read_preprocessor (outermost: false);
	      bitmap_ior_into (slurp->headers, import->slurp->headers);
	    }
	}
    }

  /* Record as a direct header.  */
  if (ok)
    bitmap_set_bit (slurp->headers, mod);

  if (ok && !read_macros ())
    ok = false;

  loadedness = ML_PREPROCESSOR;
  announce (what: "macros" );

  if (flag_preprocess_only)
    /* We're done with the string table.  */
    from ()->release ();

  return check_read (outermost, ok);
}
| 21321 | |
| 21322 | /* Read language state. */ |
| 21323 | |
bool
module_state::read_language (bool outermost)
{
  gcc_checking_assert (!lazy_snum);

  /* Already loaded: just report health.  */
  if (loadedness == ML_LANGUAGE)
    return !(slurp && from () && from ()->get_error ());

  gcc_checking_assert (slurp && slurp->current == ~0u
		       && slurp->remap_module (0) == mod);

  bool ok = true;

  /* Read direct imports.  Low bit of a remap entry marks a direct
     import; remaining bits are the module number.  */
  unsigned len = slurp->remap->length ();
  for (unsigned ix = 1; ok && ix != len; ix++)
    {
      unsigned map = (*slurp->remap)[ix];
      if (map & 1)
	{
	  module_state *import = (*modules)[map >> 1];
	  if (!import->read_language (outermost: false))
	    ok = false;
	}
    }

  unsigned counts[MSC_HWM];

  if (ok && !read_counts (counts))
    ok = false;

  function_depth++; /* Prevent unexpected GCs.  */

  /* The entity count must agree with what read_initial reserved.  */
  if (ok && counts[MSC_entities] != entity_num)
    ok = false;
  if (ok && counts[MSC_entities]
      && !read_entities (count: counts[MSC_entities],
			 lwm: counts[MSC_sec_lwm], hwm: counts[MSC_sec_hwm]))
    ok = false;

  /* Read the namespace hierarchy.  */
  if (ok && counts[MSC_namespaces]
      && !read_namespaces (num: counts[MSC_namespaces]))
    ok = false;

  /* Read any using-directives.  */
  if (ok && counts[MSC_using_directives]
      && !read_using_directives (num: counts[MSC_using_directives]))
    ok = false;

  if (ok && !read_bindings (num: counts[MSC_bindings],
			    lwm: counts[MSC_sec_lwm], hwm: counts[MSC_sec_hwm]))
    ok = false;

  /* And unnamed.  */
  if (ok && counts[MSC_pendings] && !read_pendings (count: counts[MSC_pendings]))
    ok = false;

  if (ok)
    {
      /* Remember how many lazy clusters remain to be loaded.  */
      slurp->remaining = counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
      available_clusters += counts[MSC_sec_hwm] - counts[MSC_sec_lwm];
    }

  if (!flag_module_lazy
      || (is_partition ()
	  && module_interface_p ()
	  && !module_partition_p ()))
    {
      /* Read the sections in forward order, so that dependencies are read
	 first.  See note about tarjan_connect.  */
      ggc_collect ();

      lazy_snum = ~0u;

      unsigned hwm = counts[MSC_sec_hwm];
      for (unsigned ix = counts[MSC_sec_lwm]; ok && ix != hwm; ix++)
	if (!load_section (snum: ix, NULL))
	  {
	    ok = false;
	    break;
	  }
      lazy_snum = 0;
      post_load_processing ();

      ggc_collect ();

      /* Eager loading must have populated every entity slot.  */
      if (ok && CHECKING_P)
	for (unsigned ix = 0; ix != entity_num; ix++)
	  gcc_assert (!(*entity_ary)[ix + entity_lwm].is_lazy ());
    }

  // If the import is a header-unit, we need to register initializers
  // of any static objects it contains (looking at you _Ioinit).
  // Notice, the ordering of these initializers will be that of a
  // dynamic initializer at this point in the current TU.  (Other
  // instances of these objects in other TUs will be initialized as
  // part of that TU's global initializers.)
  if (ok && counts[MSC_inits] && !read_inits (count: counts[MSC_inits]))
    ok = false;

  function_depth--;

  announce (flag_module_lazy ? "lazy" : "imported" );
  loadedness = ML_LANGUAGE;

  gcc_assert (slurp->current == ~0u);

  /* We're done with the string table.  */
  from ()->release ();

  return check_read (outermost, ok);
}
| 21437 | |
/* Ensure this module's CMI is open for reading.  If it was frozen
   (closed to stay under the open-file limit), freeze some other CMI
   if necessary and re-open (defrost) this one.  Returns false on
   failure to re-open.  */

bool
module_state::maybe_defrost ()
{
  bool ok = true;
  if (from ()->is_frozen ())
    {
      /* Make room under the open-file budget before re-opening.  */
      if (lazy_open >= lazy_limit)
	freeze_an_elf ();
      dump () && dump ("Defrosting '%s'" , filename);
      ok = from ()->defrost (name: maybe_add_cmi_prefix (to: filename));
      lazy_open++;
    }

  return ok;
}
| 21453 | |
| 21454 | /* Load section SNUM, dealing with laziness. It doesn't matter if we |
| 21455 | have multiple concurrent loads, because we do not use TREE_VISITED |
| 21456 | when reading back in. */ |
| 21457 | |
bool
module_state::load_section (unsigned snum, binding_slot *mslot)
{
  if (from ()->get_error ())
    return false;

  /* Sections only depend on earlier sections; asking for a later (or
     in-progress) one indicates a corrupt lazy reference.  */
  if (snum >= slurp->current)
    from ()->set_error (elf::E_BAD_LAZY);
  else if (maybe_defrost ())
    {
      unsigned old_current = slurp->current;
      slurp->current = snum;
      slurp->lru = 0;  /* Do not swap out.  */
      slurp->remaining--;
      read_cluster (snum);
      slurp->lru = ++lazy_lru;
      slurp->current = old_current;
    }

  if (mslot && mslot->is_lazy ())
    {
      /* Oops, the section didn't set this slot.  */
      from ()->set_error (elf::E_BAD_DATA);
      *mslot = NULL_TREE;
    }

  bool ok = !from ()->get_error ();
  if (!ok)
    {
      error_at (loc, "failed to read compiled module cluster %u: %s" ,
		snum, from ()->get_error (name: filename));
      note_cmi_name ();
    }

  /* This may have been the last outstanding cluster; release
     resources if so.  */
  maybe_completed_reading ();

  return ok;
}
| 21496 | |
/* If every lazy cluster of this fully-loaded module has now been
   read, release its resources: the macro tables, the underlying ELF
   file and the slurp state.  */

void
module_state::maybe_completed_reading ()
{
  if (loadedness == ML_LANGUAGE && slurp->current == ~0u && !slurp->remaining)
    {
      /* Give back our open-file budget slot.  */
      lazy_open--;
      /* We no longer need the macros, all tokenizing has been done.  */
      slurp->release_macros ();

      from ()->end ();
      slurp->close ();
      slurped ();
    }
}
| 21511 | |
| 21512 | /* After a reading operation, make sure things are still ok. If not, |
| 21513 | emit an error and clean up. */ |
| 21514 | |
bool
module_state::check_read (bool outermost, bool ok)
{
  gcc_checking_assert (!outermost || slurp->current == ~0u);

  /* Record logical failure on the stream so the diagnostics below
     trigger.  */
  if (!ok)
    from ()->set_error ();

  if (int e = from ()->get_error ())
    {
      auto_diagnostic_group d;
      error_at (loc, "failed to read compiled module: %s" ,
		from ()->get_error (name: filename));
      note_cmi_name ();

      /* Resource-exhaustion errors get a hint about laziness knobs;
	 a missing file gets a build-ordering hint.  */
      if (e == EMFILE
	  || e == ENFILE
#if MAPPED_READING
	  || e == ENOMEM
#endif
	  || false)
	inform (loc, "consider using %<-fno-module-lazy%>,"
		" increasing %<-param-lazy-modules=%u%> value,"
		" or increasing the per-process file descriptor limit" ,
		param_lazy_modules);
      else if (e == ENOENT)
	inform (loc, "imports must be built before being imported" );

      if (outermost)
	fatal_error (loc, "returning to the gate for a mechanical issue" );

      ok = false;
    }

  maybe_completed_reading ();

  return ok;
}
| 21553 | |
| 21554 | /* Return the IDENTIFIER_NODE naming module IX. This is the name |
| 21555 | including dots. */ |
| 21556 | |
| 21557 | char const * |
| 21558 | module_name (unsigned ix, bool ) |
| 21559 | { |
| 21560 | if (modules) |
| 21561 | { |
| 21562 | module_state *imp = (*modules)[ix]; |
| 21563 | |
| 21564 | if (ix && !imp->name) |
| 21565 | imp = imp->parent; |
| 21566 | |
| 21567 | if (header_ok || !imp->is_header ()) |
| 21568 | return imp->get_flatname (); |
| 21569 | } |
| 21570 | |
| 21571 | return NULL; |
| 21572 | } |
| 21573 | |
| 21574 | /* Return the bitmap describing what modules are imported. Remember, |
| 21575 | we always import ourselves. */ |
| 21576 | |
| 21577 | bitmap |
| 21578 | get_import_bitmap () |
| 21579 | { |
| 21580 | return this_module ()->imports; |
| 21581 | } |
| 21582 | |
| 21583 | /* Get the original decl for an instantiation at TINST, or NULL_TREE |
| 21584 | if we're not an instantiation. */ |
| 21585 | |
| 21586 | static tree |
| 21587 | orig_decl_for_instantiation (tinst_level *tinst) |
| 21588 | { |
| 21589 | if (!tinst || TREE_CODE (tinst->tldcl) == TEMPLATE_FOR_STMT) |
| 21590 | return NULL_TREE; |
| 21591 | |
| 21592 | tree decl = tinst->tldcl; |
| 21593 | if (TREE_CODE (decl) == TREE_LIST) |
| 21594 | decl = TREE_PURPOSE (decl); |
| 21595 | if (TYPE_P (decl)) |
| 21596 | decl = TYPE_NAME (decl); |
| 21597 | return decl; |
| 21598 | } |
| 21599 | |
/* Return the visible imports and path of instantiation for an
   instantiation at TINST.  If TINST is nullptr, we're not in an
   instantiation, and thus will return the visible imports of the
   current TU (and NULL *PATH_MAP_P).  We cache the information on
   the tinst level itself.  */

static bitmap
path_of_instantiation (tinst_level *tinst, bitmap *path_map_p)
{
  gcc_checking_assert (modules_p ());

  tree decl = orig_decl_for_instantiation (tinst);
  if (!decl)
    {
      gcc_assert (!tinst || !tinst->next);
      /* Not inside an instantiation, just the regular case.  */
      *path_map_p = nullptr;
      return get_import_bitmap ();
    }

  if (!tinst->path)
    {
      /* Calculate: recurse to the enclosing instantiation level and
	 extend its answer with this level's originating module.  */
      bitmap visible = path_of_instantiation (tinst: tinst->next, path_map_p);
      bitmap path_map = *path_map_p;

      if (!path_map)
	{
	  /* Outermost level: the path starts with the current TU,
	     which always occupies module slot zero.  */
	  path_map = BITMAP_GGC_ALLOC ();
	  bitmap_set_bit (path_map, 0);
	}

      if (unsigned mod = get_originating_module (decl))
	if (!bitmap_bit_p (path_map, mod))
	  {
	    /* This is brand new information!  Allocate fresh bitmaps
	       rather than mutating, as enclosing levels share the
	       originals.  */
	    bitmap new_path = BITMAP_GGC_ALLOC ();
	    bitmap_copy (new_path, path_map);
	    bitmap_set_bit (new_path, mod);
	    path_map = new_path;

	    bitmap imports = (*modules)[mod]->imports;
	    if (bitmap_intersect_compl_p (imports, visible))
	      {
		/* IMPORTS contains additional modules to VISIBLE.  */
		bitmap new_visible = BITMAP_GGC_ALLOC ();

		bitmap_ior (new_visible, visible, imports);
		visible = new_visible;
	      }
	  }

      /* Cache on the tinst level for subsequent queries.  */
      tinst->path = path_map;
      tinst->visible = visible;
    }

  *path_map_p = tinst->path;
  return tinst->visible;
}
| 21659 | |
| 21660 | /* Return the bitmap describing what modules are visible along the |
| 21661 | path of instantiation. If we're not an instantiation, this will be |
| 21662 | the visible imports of the TU. *PATH_MAP_P is filled in with the |
| 21663 | modules owning the instantiation path -- we see the module-linkage |
| 21664 | entities of those modules. */ |
| 21665 | |
| 21666 | bitmap |
| 21667 | visible_instantiation_path (bitmap *path_map_p) |
| 21668 | { |
| 21669 | if (!modules_p ()) |
| 21670 | return NULL; |
| 21671 | |
| 21672 | return path_of_instantiation (tinst: current_instantiation (), path_map_p); |
| 21673 | } |
| 21674 | |
| 21675 | /* Returns the bitmap describing what modules were visible from the |
| 21676 | module that the current instantiation originated from. If we're |
| 21677 | not an instantiation, returns NULL. *MODULE_P is filled in with |
| 21678 | the originating module of the definition for this instantiation. */ |
| 21679 | |
| 21680 | bitmap |
| 21681 | visible_from_instantiation_origination (unsigned *module_p) |
| 21682 | { |
| 21683 | if (!modules_p ()) |
| 21684 | return NULL; |
| 21685 | |
| 21686 | tree decl = orig_decl_for_instantiation (tinst: current_instantiation ()); |
| 21687 | if (!decl) |
| 21688 | return NULL; |
| 21689 | |
| 21690 | *module_p = get_originating_module (decl); |
| 21691 | return (*modules)[*module_p]->imports; |
| 21692 | } |
| 21693 | |
| 21694 | /* We've just directly imported IMPORT. Update our import/export |
| 21695 | bitmaps. IS_EXPORT is true if we're reexporting the OTHER. */ |
| 21696 | |
| 21697 | void |
| 21698 | module_state::set_import (module_state const *import, bool is_export) |
| 21699 | { |
| 21700 | gcc_checking_assert (this != import); |
| 21701 | |
| 21702 | /* We see IMPORT's exports (which includes IMPORT). If IMPORT is |
| 21703 | the primary interface or a partition we'll see its imports. */ |
| 21704 | bitmap_ior_into (imports, import->is_module () || import->is_partition () |
| 21705 | ? import->imports : import->exports); |
| 21706 | |
| 21707 | if (is_export) |
| 21708 | /* We'll export OTHER's exports. */ |
| 21709 | bitmap_ior_into (exports, import->exports); |
| 21710 | } |
| 21711 | |
/* Return the declaring entity of DECL.  That is the decl determining
   how to decorate DECL with module information.  Returns NULL_TREE if
   it's the global module.  */

tree
get_originating_module_decl (tree decl)
{
  /* An enumeration constant: decorate via its enumeration type.  */
  if (TREE_CODE (decl) == CONST_DECL
      && DECL_CONTEXT (decl)
      && (TREE_CODE (DECL_CONTEXT (decl)) == ENUMERAL_TYPE))
    decl = TYPE_NAME (DECL_CONTEXT (decl));
  else if (TREE_CODE (decl) == FIELD_DECL
	   || TREE_CODE (decl) == USING_DECL
	   || CONST_DECL_USING_P (decl))
    {
      /* Fields and using-decls are decorated via their containing
	 scope (a function, or the named type).  */
      decl = DECL_CONTEXT (decl);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	decl = TYPE_NAME (decl);
    }

  gcc_checking_assert (TREE_CODE (decl) == TEMPLATE_DECL
		       || TREE_CODE (decl) == FUNCTION_DECL
		       || TREE_CODE (decl) == TYPE_DECL
		       || TREE_CODE (decl) == VAR_DECL
		       || TREE_CODE (decl) == CONCEPT_DECL
		       || TREE_CODE (decl) == NAMESPACE_DECL);

  /* Walk outwards until we reach a namespace-scope declaration.  */
  for (;;)
    {
      /* Uninstantiated template friends are owned by the befriending
	 class -- not their context.  */
      if (TREE_CODE (decl) == TEMPLATE_DECL
	  && DECL_UNINSTANTIATED_TEMPLATE_FRIEND_P (decl))
	decl = TYPE_NAME (DECL_CHAIN (decl));

      /* An imported temploid friend is attached to the same module the
	 befriending class was.  */
      if (imported_temploid_friends)
	if (tree *slot = imported_temploid_friends->get (k: decl))
	  decl = *slot;

      int use;
      if (tree ti = node_template_info (decl, use))
	{
	  /* A template instantiation/specialization: step to the
	     template itself.  */
	  decl = TI_TEMPLATE (ti);
	  if (TREE_CODE (decl) != TEMPLATE_DECL)
	    {
	      /* A friend template specialization.  */
	      gcc_checking_assert (OVL_P (decl));
	      return global_namespace;
	    }
	}
      else
	{
	  tree ctx = CP_DECL_CONTEXT (decl);
	  if (TREE_CODE (ctx) == NAMESPACE_DECL)
	    break;

	  if (TYPE_P (ctx))
	    {
	      ctx = TYPE_NAME (ctx);
	      if (!ctx)
		{
		  /* Some kind of internal type.  */
		  gcc_checking_assert (DECL_ARTIFICIAL (decl));
		  return global_namespace;
		}
	    }
	  decl = ctx;
	}
    }

  return decl;
}
| 21787 | |
| 21788 | /* If DECL is imported, return which module imported it, or 0 for the current |
| 21789 | module. Except that if GLOBAL_M1, return -1 for decls attached to the |
| 21790 | global module. */ |
| 21791 | |
| 21792 | int |
| 21793 | get_originating_module (tree decl, bool global_m1) |
| 21794 | { |
| 21795 | tree owner = get_originating_module_decl (decl); |
| 21796 | tree not_tmpl = STRIP_TEMPLATE (owner); |
| 21797 | |
| 21798 | if (!DECL_LANG_SPECIFIC (not_tmpl)) |
| 21799 | return global_m1 ? -1 : 0; |
| 21800 | |
| 21801 | if (global_m1 && !DECL_MODULE_ATTACH_P (not_tmpl)) |
| 21802 | return -1; |
| 21803 | |
| 21804 | int mod = !DECL_MODULE_IMPORT_P (not_tmpl) ? 0 : get_importing_module (owner); |
| 21805 | gcc_checking_assert (!global_m1 || !(*modules)[mod]->is_header ()); |
| 21806 | return mod; |
| 21807 | } |
| 21808 | |
| 21809 | /* DECL is imported, return which module imported it. |
| 21810 | If FLEXIBLE, return -1 if not found, otherwise checking ICE. */ |
| 21811 | |
| 21812 | unsigned |
| 21813 | get_importing_module (tree decl, bool flexible) |
| 21814 | { |
| 21815 | unsigned index = import_entity_index (decl, null_ok: flexible); |
| 21816 | if (index == ~(~0u >> 1)) |
| 21817 | return -1; |
| 21818 | module_state *module = import_entity_module (index); |
| 21819 | |
| 21820 | return module->mod; |
| 21821 | } |
| 21822 | |
/* Is it permissible to redeclare OLDDECL with NEWDECL.

   If NEWDECL is NULL, assumes that OLDDECL will be redeclared using
   the current scope's module and attachment.  Returns false (after
   diagnosing) when the two have conflicting module attachment.  */

bool
module_may_redeclare (tree olddecl, tree newdecl)
{
  /* Walk out to the namespace-scope declaration enclosing OLDDECL.  */
  tree decl = olddecl;
  for (;;)
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TREE_CODE (ctx) == NAMESPACE_DECL)
	// Found the namespace-scope decl.
	break;
      if (!CLASS_TYPE_P (ctx))
	// We've met a non-class scope.  Such a thing is not
	// reopenable, so we must be ok.
	return true;
      decl = TYPE_NAME (ctx);
    }

  int use_tpl = 0;
  if (node_template_info (STRIP_TEMPLATE (decl), use&: use_tpl) && use_tpl)
    // Specializations of any kind can be redeclared anywhere.
    // FIXME: Should we be checking this in more places on the scope chain?
    return true;

  /* Determine the module each decl is attached to.  Default both to
     the primary interface of the current TU.  */
  module_state *old_mod = get_primary (parent: this_module ());
  module_state *new_mod = old_mod;

  tree old_origin = get_originating_module_decl (decl);
  tree old_inner = STRIP_TEMPLATE (old_origin);
  bool olddecl_attached_p = (DECL_LANG_SPECIFIC (old_inner)
			     && DECL_MODULE_ATTACH_P (old_inner));
  if (DECL_LANG_SPECIFIC (old_inner) && DECL_MODULE_IMPORT_P (old_inner))
    {
      /* OLDDECL was imported: find the module it actually came from.  */
      unsigned index = import_entity_index (decl: old_origin);
      old_mod = get_primary (parent: import_entity_module (index));
    }

  bool newdecl_attached_p = module_attach_p ();
  if (newdecl)
    {
      tree new_origin = get_originating_module_decl (decl: newdecl);
      tree new_inner = STRIP_TEMPLATE (new_origin);
      newdecl_attached_p = (DECL_LANG_SPECIFIC (new_inner)
			    && DECL_MODULE_ATTACH_P (new_inner));
      if (DECL_LANG_SPECIFIC (new_inner) && DECL_MODULE_IMPORT_P (new_inner))
	{
	  unsigned index = import_entity_index (decl: new_origin);
	  new_mod = get_primary (parent: import_entity_module (index));
	}
    }

  /* Module attachment needs to match.  */
  if (olddecl_attached_p == newdecl_attached_p)
    {
      if (!olddecl_attached_p)
	/* Both are GM entities, OK.  */
	return true;

      if (new_mod == old_mod)
	/* Both attached to same named module, OK.  */
	return true;
    }

  /* Attached to different modules, error.  Report against NEWDECL
     when we have one, else at the current location.  */
  decl = newdecl ? newdecl : olddecl;
  location_t loc = newdecl ? DECL_SOURCE_LOCATION (newdecl) : input_location;
  if (DECL_IS_UNDECLARED_BUILTIN (olddecl))
    {
      /* Conflicting with a compiler builtin.  */
      if (newdecl_attached_p)
	error_at (loc, "declaring %qD in module %qs conflicts with builtin "
		  "in global module" , decl, new_mod->get_flatname ());
      else
	error_at (loc, "declaration %qD conflicts with builtin" , decl);
    }
  else if (DECL_LANG_SPECIFIC (old_inner) && DECL_MODULE_IMPORT_P (old_inner))
    {
      /* Conflicting with an imported declaration.  */
      auto_diagnostic_group d;
      if (newdecl_attached_p)
	error_at (loc, "redeclaring %qD in module %qs conflicts with import" ,
		  decl, new_mod->get_flatname ());
      else
	error_at (loc, "redeclaring %qD in global module conflicts with import" ,
		  decl);

      if (olddecl_attached_p)
	inform (DECL_SOURCE_LOCATION (olddecl),
		"import declared attached to module %qs" ,
		old_mod->get_flatname ());
      else
	inform (DECL_SOURCE_LOCATION (olddecl),
		"import declared in global module" );
    }
  else
    {
      /* Both declared here, with differing attachment.  */
      auto_diagnostic_group d;
      if (newdecl_attached_p)
	error_at (loc, "conflicting declaration of %qD in module %qs" ,
		  decl, new_mod->get_flatname ());
      else
	error_at (loc, "conflicting declaration of %qD in global module" ,
		  decl);

      if (olddecl_attached_p)
	inform (DECL_SOURCE_LOCATION (olddecl),
		"previously declared in module %qs" ,
		old_mod->get_flatname ());
      else
	inform (DECL_SOURCE_LOCATION (olddecl),
		"previously declared in global module" );
    }
  return false;
}
| 21939 | |
| 21940 | /* DECL is being created by this TU. Record it came from here. We |
| 21941 | record module purview, so we can see if partial or explicit |
| 21942 | specialization needs to be written out, even though its purviewness |
| 21943 | comes from the most general template. */ |
| 21944 | |
| 21945 | void |
| 21946 | set_instantiating_module (tree decl) |
| 21947 | { |
| 21948 | gcc_assert (TREE_CODE (decl) == FUNCTION_DECL |
| 21949 | || VAR_P (decl) |
| 21950 | || TREE_CODE (decl) == TYPE_DECL |
| 21951 | || TREE_CODE (decl) == CONCEPT_DECL |
| 21952 | || TREE_CODE (decl) == TEMPLATE_DECL |
| 21953 | || TREE_CODE (decl) == CONST_DECL |
| 21954 | || (TREE_CODE (decl) == NAMESPACE_DECL |
| 21955 | && DECL_NAMESPACE_ALIAS (decl))); |
| 21956 | |
| 21957 | if (!modules_p ()) |
| 21958 | return; |
| 21959 | |
| 21960 | decl = STRIP_TEMPLATE (decl); |
| 21961 | |
| 21962 | if (!DECL_LANG_SPECIFIC (decl) && module_purview_p ()) |
| 21963 | retrofit_lang_decl (decl); |
| 21964 | |
| 21965 | if (DECL_LANG_SPECIFIC (decl)) |
| 21966 | { |
| 21967 | DECL_MODULE_PURVIEW_P (decl) = module_purview_p (); |
| 21968 | /* If this was imported, we'll still be in the entity_hash. */ |
| 21969 | DECL_MODULE_IMPORT_P (decl) = false; |
| 21970 | } |
| 21971 | } |
| 21972 | |
/* If DECL is a class member, whose class is not defined in this TU
   (it was imported), remember this decl on the class_members list so
   it gets emitted into our CMI.  */

void
set_defining_module (tree decl)
{
  /* DECL must not itself be an import.  */
  gcc_checking_assert (!DECL_LANG_SPECIFIC (decl)
		       || !DECL_MODULE_IMPORT_P (decl));

  if (module_maybe_has_cmi_p ())
    {
      /* We need to track all declarations within a module, not just those
	 in the module purview, because we don't necessarily know yet if
	 this module will require a CMI while in the global fragment.  */
      tree ctx = DECL_CONTEXT (decl);
      if (ctx
	  && (TREE_CODE (ctx) == RECORD_TYPE || TREE_CODE (ctx) == UNION_TYPE)
	  && DECL_LANG_SPECIFIC (TYPE_NAME (ctx))
	  && DECL_MODULE_IMPORT_P (TYPE_NAME (ctx)))
	{
	  /* This entity's context is from an import.  We may need to
	     record this entity to make sure we emit it in the CMI.
	     Template specializations are in the template hash tables,
	     so we don't need to record them here as well.  */
	  int use_tpl = -1;
	  tree ti = node_template_info (decl, use&: use_tpl);
	  /* use_tpl <= 0: not a template use, or an explicit
	     specialization (recorded in the specialization tables).  */
	  if (use_tpl <= 0)
	    {
	      if (ti)
		{
		  gcc_checking_assert (!use_tpl);
		  /* Get to the TEMPLATE_DECL.  */
		  decl = TI_TEMPLATE (ti);
		}

	      /* Record it on the class_members list.  */
	      vec_safe_push (v&: class_members, obj: decl);
	    }
	}
    }
}
| 22014 | |
| 22015 | /* Also remember DECL if it's a newly declared class template partial |
| 22016 | specialization, because these are not necessarily added to the |
| 22017 | instantiation tables. */ |
| 22018 | |
| 22019 | void |
| 22020 | set_defining_module_for_partial_spec (tree decl) |
| 22021 | { |
| 22022 | if (module_maybe_has_cmi_p () |
| 22023 | && DECL_IMPLICIT_TYPEDEF_P (decl) |
| 22024 | && CLASSTYPE_TEMPLATE_SPECIALIZATION (TREE_TYPE (decl))) |
| 22025 | vec_safe_push (v&: partial_specializations, obj: decl); |
| 22026 | } |
| 22027 | |
/* Decorate namespace-scope DECL with its originating-module state:
   attachment to the current named module (if any) and export status.
   FRIEND_P relaxes the checking assert for friend declarations, whose
   originating decl may differ.  */

void
set_originating_module (tree decl, bool friend_p ATTRIBUTE_UNUSED)
{
  set_instantiating_module (decl);

  /* Only namespace-scope decls carry attachment/export flags.  */
  if (!DECL_NAMESPACE_SCOPE_P (decl))
    return;

  gcc_checking_assert (friend_p || decl == get_originating_module_decl (decl));

  if (module_attach_p ())
    {
      retrofit_lang_decl (decl);
      DECL_MODULE_ATTACH_P (decl) = true;
    }

  /* It is ill-formed to export a declaration with internal linkage.  However,
     at the point this function is called we don't yet always know whether this
     declaration has internal linkage; instead we defer this check for callers
     to do once visibility has been determined.  */
  if (module_exporting_p ())
    DECL_MODULE_EXPORT_P (decl) = true;
}
| 22051 | |
/* Checks whether DECL within a module unit has valid linkage for its kind.
   Must be called after visibility for DECL has been finalised.  Emits
   errors (and clears the export flag) on violation.  */

void
check_module_decl_linkage (tree decl)
{
  if (!module_has_cmi_p ())
    return;

  /* A header unit shall not contain a definition of a non-inline function
     or variable (not template) whose name has external linkage.  */
  if (header_module_p ()
      && !processing_template_decl
      && ((TREE_CODE (decl) == FUNCTION_DECL
	   && !DECL_DECLARED_INLINE_P (decl))
	  || (TREE_CODE (decl) == VAR_DECL
	      && !DECL_INLINE_VAR_P (decl)))
      && decl_defined_p (decl)
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_TEMPLATE_INSTANTIATION (decl))
      && decl_linkage (decl) == lk_external)
    error_at (DECL_SOURCE_LOCATION (decl),
	      "external linkage definition of %qD in header module must "
	      "be declared %<inline%>" , decl);

  /* An internal-linkage declaration cannot be generally be exported.
     But it's OK to export any declaration from a header unit, including
     internal linkage declarations.  */
  if (!header_module_p () && DECL_MODULE_EXPORT_P (decl))
    {
      /* Let's additionally treat any exported declaration within an
	 internal namespace as exporting a declaration with internal
	 linkage, as this would also implicitly export the internal
	 linkage namespace.  */
      if (decl_internal_context_p (decl))
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "exporting declaration %qD declared in unnamed namespace" ,
		    decl);
	  /* Clear the flag so we don't stream a bogus export.  */
	  DECL_MODULE_EXPORT_P (decl) = false;
	}
      else if (decl_linkage (decl) == lk_internal)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "exporting declaration %qD with internal linkage" , decl);
	  DECL_MODULE_EXPORT_P (decl) = false;
	}
    }
}
| 22101 | |
| 22102 | /* Given a scope CTX, find the scope we want to attach the key to, |
| 22103 | or NULL if no key scope is required. */ |
| 22104 | |
| 22105 | static tree |
| 22106 | adjust_key_scope (tree ctx) |
| 22107 | { |
| 22108 | /* For members, key it to the containing type to handle deduplication |
| 22109 | correctly. For fields, this is necessary as FIELD_DECLs have no |
| 22110 | dep and so would only be streamed after the lambda type, defeating |
| 22111 | our ability to merge them. |
| 22112 | |
| 22113 | Other class-scope key decls might depend on the type of the lambda |
| 22114 | but be within the same cluster; we need to ensure that we never |
| 22115 | first see the key decl while streaming the lambda type as merging |
| 22116 | would then fail when comparing the partially-streamed lambda type |
| 22117 | of the key decl with the existing (PR c++/122310). |
| 22118 | |
| 22119 | Perhaps sort_cluster can be adjusted to handle this better, but |
| 22120 | this is a simple workaround (and might down on the number of |
| 22121 | entries in keyed_table as a bonus). */ |
| 22122 | while (!DECL_NAMESPACE_SCOPE_P (ctx)) |
| 22123 | if (DECL_CLASS_SCOPE_P (ctx)) |
| 22124 | ctx = TYPE_NAME (DECL_CONTEXT (ctx)); |
| 22125 | else |
| 22126 | ctx = DECL_CONTEXT (ctx); |
| 22127 | |
| 22128 | return ctx; |
| 22129 | } |
| 22130 | |
/* DECL is keyed to CTX for odr purposes.  Record the pair in
   keyed_table (creating it on first use) and mark CTX as having keyed
   decls.  */

void
maybe_key_decl (tree ctx, tree decl)
{
  if (!modules_p ())
    return;

  /* We only need to deal here with decls attached to var, field,
     parm, type, function, or concept decls.  */
  if (TREE_CODE (ctx) != VAR_DECL
      && TREE_CODE (ctx) != FIELD_DECL
      && TREE_CODE (ctx) != PARM_DECL
      && TREE_CODE (ctx) != TYPE_DECL
      && TREE_CODE (ctx) != FUNCTION_DECL
      && TREE_CODE (ctx) != CONCEPT_DECL)
    return;

  /* Only lambdas are keyed to non-function scopes.  */
  gcc_checking_assert (LAMBDA_TYPE_P (TREE_TYPE (decl))
		       || TREE_CODE (ctx) == FUNCTION_DECL);

  /* We don't need to use the keyed map for functions with definitions,
     as we can instead use the MK_local_type handling for streaming.  */
  if (TREE_CODE (ctx) == FUNCTION_DECL
      && (has_definition (decl: ctx)
	  /* If we won't be streaming this definition there's also no
	     need to record the key, as it will not be useful for merging
	     (this function is non-inline and so a matching declaration
	     will always be an ODR violation anyway).  */
	  || !module_maybe_has_cmi_p ()))
    return;

  /* Key to the enclosing namespace-scope decl; see adjust_key_scope.  */
  ctx = adjust_key_scope (ctx);

  if (!keyed_table)
    keyed_table = new keyed_map_t (EXPERIMENT (1, 400));

  auto &vec = keyed_table->get_or_insert (k: ctx);
  if (!vec.length ())
    {
      /* First keyed decl for CTX: flag it so streaming knows to
	 look in the table.  */
      retrofit_lang_decl (ctx);
      DECL_MODULE_KEYED_DECLS_P (ctx) = true;
    }
  /* In checking builds, verify DECL is not recorded twice.  */
  if (CHECKING_P)
    for (tree t : vec)
      gcc_checking_assert (t != decl);

  vec.safe_push (obj: decl);
}
| 22180 | |
/* Find the scope that the local type or lambda DECL is keyed to, if any.
   Returns NULL_TREE when DECL has no extra scope.  */

static tree
get_keyed_decl_scope (tree decl)
{
  gcc_checking_assert (DECL_IMPLICIT_TYPEDEF_P (STRIP_TEMPLATE (decl)));

  /* A lambda's key is its extra scope; anything else is keyed via its
     context.  */
  tree scope = (LAMBDA_TYPE_P (TREE_TYPE (decl))
		? LAMBDA_TYPE_EXTRA_SCOPE (TREE_TYPE (decl))
		: CP_DECL_CONTEXT (decl));
  if (!scope)
    return NULL_TREE;

  /* These are the scope kinds maybe_key_decl accepts (functions with
     definitions are streamed via MK_local_type instead).  */
  gcc_checking_assert (TREE_CODE (scope) == VAR_DECL
		       || TREE_CODE (scope) == FIELD_DECL
		       || TREE_CODE (scope) == PARM_DECL
		       || TREE_CODE (scope) == TYPE_DECL
		       || (TREE_CODE (scope) == FUNCTION_DECL
			   && !has_definition (scope))
		       || TREE_CODE (scope) == CONCEPT_DECL);

  /* Must match the adjustment maybe_key_decl performed.  */
  scope = adjust_key_scope (ctx: scope);

  gcc_checking_assert (scope
		       && DECL_LANG_SPECIFIC (scope)
		       && DECL_MODULE_KEYED_DECLS_P (scope));
  return scope;
}
| 22209 | |
| 22210 | /* DECL is an instantiated friend that should be attached to the same |
| 22211 | module that ORIG is. */ |
| 22212 | |
| 22213 | void |
| 22214 | propagate_defining_module (tree decl, tree orig) |
| 22215 | { |
| 22216 | if (!modules_p ()) |
| 22217 | return; |
| 22218 | |
| 22219 | tree not_tmpl = STRIP_TEMPLATE (orig); |
| 22220 | if (DECL_LANG_SPECIFIC (not_tmpl) && DECL_MODULE_ATTACH_P (not_tmpl)) |
| 22221 | { |
| 22222 | tree inner = STRIP_TEMPLATE (decl); |
| 22223 | retrofit_lang_decl (inner); |
| 22224 | DECL_MODULE_ATTACH_P (inner) = true; |
| 22225 | } |
| 22226 | |
| 22227 | if (DECL_LANG_SPECIFIC (not_tmpl) && DECL_MODULE_IMPORT_P (not_tmpl)) |
| 22228 | { |
| 22229 | bool exists = imported_temploid_friends->put (k: decl, v: orig); |
| 22230 | |
| 22231 | /* We should only be called if lookup for an existing decl |
| 22232 | failed, in which case there shouldn't already be an entry |
| 22233 | in the map. */ |
| 22234 | gcc_assert (!exists); |
| 22235 | } |
| 22236 | } |
| 22237 | |
/* NEWDECL matched with OLDDECL, transfer defining module information
   onto OLDDECL.  We've already validated attachment matches.  */

void
transfer_defining_module (tree olddecl, tree newdecl)
{
  if (!modules_p ())
    return;

  tree old_inner = STRIP_TEMPLATE (olddecl);
  tree new_inner = STRIP_TEMPLATE (newdecl);

  if (DECL_LANG_SPECIFIC (new_inner))
    {
      gcc_checking_assert (DECL_LANG_SPECIFIC (old_inner));
      /* Purview is sticky; a non-imported redeclaration clears the
	 import flag.  */
      if (DECL_MODULE_PURVIEW_P (new_inner))
	DECL_MODULE_PURVIEW_P (old_inner) = true;
      if (!DECL_MODULE_IMPORT_P (new_inner))
	DECL_MODULE_IMPORT_P (old_inner) = false;
    }

  /* Carry over any temploid-friend origin from NEWDECL.  */
  if (tree *p = imported_temploid_friends->get (k: newdecl))
    {
      tree orig = *p;
      tree &slot = imported_temploid_friends->get_or_insert (k: olddecl);
      if (!slot)
	slot = orig;
      else if (slot != orig)
	/* This can happen when multiple classes declare the same
	   friend function (e.g. g++.dg/modules/tpl-friend-4);
	   make sure we at least attach to the same module.  */
	gcc_checking_assert (get_originating_module (slot)
			     == get_originating_module (orig));
    }
}
| 22273 | |
| 22274 | /* DECL is being freed, clear data we don't need anymore. */ |
| 22275 | |
| 22276 | void |
| 22277 | remove_defining_module (tree decl) |
| 22278 | { |
| 22279 | if (!modules_p ()) |
| 22280 | return; |
| 22281 | |
| 22282 | if (imported_temploid_friends) |
| 22283 | imported_temploid_friends->remove (k: decl); |
| 22284 | } |
| 22285 | |
/* Create the flat name string.  It is simplest to have it handy.
   For a dotted module name the components are joined with '.'; a
   partition is prefixed with its primary module's flatname plus ':'.
   Headers and single-component names reuse their existing string.  */

void
module_state::set_flatname ()
{
  gcc_checking_assert (!flatname);
  if (parent)
    {
      /* A multi-component name: collect components innermost-first.  */
      auto_vec<tree,5> ids;
      size_t len = 0;
      char const *primary = NULL;
      size_t pfx_len = 0;

      for (module_state *probe = this;
	   probe;
	   probe = probe->parent)
	if (is_partition () && !probe->is_partition ())
	  {
	    /* Crossed from partition components into the primary
	       module: use its already-computed flatname as prefix.  */
	    primary = probe->get_flatname ();
	    pfx_len = strlen (s: primary);
	    break;
	  }
	else
	  {
	    ids.safe_push (obj: probe->name);
	    /* +1 covers a separator or the trailing NUL.  */
	    len += IDENTIFIER_LENGTH (probe->name) + 1;
	  }

      /* is_partition () accounts for the ':' separator byte.  */
      char *flat = XNEWVEC (char, pfx_len + len + is_partition ());
      flatname = flat;

      if (primary)
	{
	  memcpy (dest: flat, src: primary, n: pfx_len);
	  flat += pfx_len;
	  *flat++ = ':';
	}

      /* Pop components back into outermost-first order, dot-joined.
	 Each memcpy of l + 1 bytes copies the NUL, which the next
	 component's '.' then overwrites.  */
      for (unsigned len = 0; ids.length ();)
	{
	  if (len)
	    flat[len++] = '.';
	  tree elt = ids.pop ();
	  unsigned l = IDENTIFIER_LENGTH (elt);
	  memcpy (dest: flat + len, IDENTIFIER_POINTER (elt), n: l + 1);
	  len += l;
	}
    }
  else if (is_header ())
    /* Header units are named by a string constant (the path).  */
    flatname = TREE_STRING_POINTER (name);
  else
    flatname = IDENTIFIER_POINTER (name);
}
| 22339 | |
/* Open the GCM file and prepare to read.  Return whether that was
   successful.  Creates the slurping state; may first swap out another
   module's file descriptor if we are at the lazy-open limit.  */

bool
module_state::open_slurp (cpp_reader *reader)
{
  if (slurp)
    return true;

  /* Stay within the file descriptor budget.  */
  if (lazy_open >= lazy_limit)
    freeze_an_elf ();

  int fd = -1;
  int e = ENOENT;
  if (filename)
    {
      const char *file = maybe_add_cmi_prefix (to: filename);
      dump () && dump ("CMI is %s" , file);
      if (note_module_cmi_yes || inform_cmi_p)
	inform (loc, "reading CMI %qs" , file);
      /* Add the CMI file to the dependency tracking.  */
      if (cpp_get_deps (reader))
	deps_add_dep (cpp_get_deps (reader), file);
      fd = open (file: file, O_RDONLY | O_CLOEXEC | O_BINARY);
      /* Capture errno immediately for later diagnostics.  */
      e = errno;
    }

  /* Even a failed open creates the slurper; the elf reader holds the
     error for reporting.  */
  gcc_checking_assert (!slurp);
  slurp = new slurping (new elf_in (fd, e));

  bool ok = from ()->begin (loc);
  if (ok)
    {
      lazy_open++;
      slurp->lru = ++lazy_lru;
    }
  return ok;
}
| 22378 | |
| 22379 | /* Return whether importing this GCM would work without an error in |
| 22380 | read_config. */ |
| 22381 | |
| 22382 | bool |
| 22383 | module_state::check_importable (cpp_reader *reader) |
| 22384 | { |
| 22385 | if (loadedness > ML_CONFIG) |
| 22386 | return true; |
| 22387 | if (!open_slurp (reader)) |
| 22388 | return false; |
| 22389 | module_state_config config; |
| 22390 | return read_config (config, /*complain*/false); |
| 22391 | } |
| 22392 | |
| 22393 | /* Read the CMI file for a module. */ |
| 22394 | |
bool
module_state::do_import (cpp_reader *reader, bool outermost)
{
  gcc_assert (global_namespace == current_scope () && loadedness == ML_NONE);

  /* If this TU is a partition of the module we're importing,
     that module is the primary module interface.  */
  if (this_module ()->is_partition ()
      && this == get_primary (parent: this_module ()))
    module_p = true;

  /* Give the module its own location subtree rooted at the import
     location.  */
  loc = linemap_module_loc (line_table, from: loc, name: get_flatname ());

  bool ok = open_slurp (reader);
  /* NOTE: OK is overwritten below whenever the reader carries no
     error state; a failed open only matters through the error it
     recorded in the elf_in.  */
  if (!from ()->get_error ())
    {
      announce (what: "importing" );
      loadedness = ML_CONFIG;
      ok = read_initial (reader);
    }

  /* No section read should still be in flight.  */
  gcc_assert (slurp->current == ~0u);

  return check_read (outermost, ok);
}
| 22420 | |
| 22421 | /* Attempt to increase the file descriptor limit. */ |
| 22422 | |
static bool
try_increase_lazy (unsigned want)
{
  /* Only called once we have exhausted the current budget.  */
  gcc_checking_assert (lazy_open >= lazy_limit);

  /* If we're increasing, saturate at hard limit. */
  if (want > lazy_hard_limit && lazy_limit < lazy_hard_limit)
    want = lazy_hard_limit;

#if HAVE_SETRLIMIT
  /* Ask the OS for more file descriptors, keeping LAZY_HEADROOM
     spare for non-module files.  */
  if ((!lazy_limit || !param_lazy_modules)
      && lazy_hard_limit
      && want <= lazy_hard_limit)
    {
      struct rlimit rlimit;
      rlimit.rlim_cur = want + LAZY_HEADROOM;
      rlimit.rlim_max = lazy_hard_limit + LAZY_HEADROOM;
      if (!setrlimit (RLIMIT_NOFILE, rlimits: &rlimit))
	lazy_limit = want;
    }
#endif

  /* Report whether there is now room for another open CMI.  */
  return lazy_open < lazy_limit;
}
| 22447 | |
| 22448 | /* Pick a victim module to freeze its reader. */ |
| 22449 | |
| 22450 | void |
| 22451 | module_state::freeze_an_elf () |
| 22452 | { |
| 22453 | if (try_increase_lazy (want: lazy_open * 2)) |
| 22454 | return; |
| 22455 | |
| 22456 | module_state *victim = NULL; |
| 22457 | for (unsigned ix = modules->length (); ix--;) |
| 22458 | { |
| 22459 | module_state *candidate = (*modules)[ix]; |
| 22460 | if (candidate && candidate->slurp && candidate->slurp->lru |
| 22461 | && candidate->from ()->is_freezable () |
| 22462 | && (!victim || victim->slurp->lru > candidate->slurp->lru)) |
| 22463 | victim = candidate; |
| 22464 | } |
| 22465 | |
| 22466 | if (victim) |
| 22467 | { |
| 22468 | dump () && dump ("Freezing '%s'" , victim->filename); |
| 22469 | if (victim->slurp->macro_defs.size) |
| 22470 | /* Save the macro definitions to a buffer. */ |
| 22471 | victim->from ()->preserve (bytes&: victim->slurp->macro_defs); |
| 22472 | if (victim->slurp->macro_tbl.size) |
| 22473 | /* Save the macro definitions to a buffer. */ |
| 22474 | victim->from ()->preserve (bytes&: victim->slurp->macro_tbl); |
| 22475 | victim->from ()->freeze (); |
| 22476 | lazy_open--; |
| 22477 | } |
| 22478 | else |
| 22479 | dump () && dump ("No module available for freezing" ); |
| 22480 | } |
| 22481 | |
| 22482 | /* Load the lazy slot *MSLOT, INDEX'th slot of the module. */ |
| 22483 | |
| 22484 | bool |
| 22485 | module_state::lazy_load (unsigned index, binding_slot *mslot) |
| 22486 | { |
| 22487 | unsigned n = dump.push (m: this); |
| 22488 | |
| 22489 | gcc_checking_assert (function_depth); |
| 22490 | |
| 22491 | unsigned cookie = mslot->get_lazy (); |
| 22492 | unsigned snum = cookie >> 2; |
| 22493 | dump () && dump ("Loading entity %M[%u] section:%u" , this, index, snum); |
| 22494 | |
| 22495 | bool ok = load_section (snum, mslot); |
| 22496 | |
| 22497 | dump.pop (n); |
| 22498 | |
| 22499 | return ok; |
| 22500 | } |
| 22501 | |
| 22502 | /* Load MOD's binding for NS::ID into *MSLOT. *MSLOT contains the |
| 22503 | lazy cookie. OUTER is true if this is the outermost lazy, (used |
| 22504 | for diagnostics). */ |
| 22505 | |
void
lazy_load_binding (unsigned mod, tree ns, tree id, binding_slot *mslot)
{
  /* Remember the diagnostic count, so we can tell whether this load
     emitted anything.  */
  int count = errorcount + warningcount;

  bool timer_running = timevar_cond_start (TV_MODULE_IMPORT);

  /* Make sure lazy loading from a template context behaves as if
     from a non-template context.  */
  processing_template_decl_sentinel ptds;

  /* Stop GC happening, even in outermost loads (because our caller
     could well be building up a lookup set).  */
  function_depth++;

  /* Module zero is the current TU, which never has lazy bindings.  */
  gcc_checking_assert (mod);
  module_state *module = (*modules)[mod];
  unsigned n = dump.push (m: module);

  unsigned snum = mslot->get_lazy ();
  dump () && dump ("Lazily binding %P@%N section:%u" , ns, id,
		   module->name, snum);

  /* Guard against recursively re-entering the same section.  */
  bool ok = !recursive_lazy (snum);
  if (ok)
    {
      ok = module->load_section (snum, mslot);
      lazy_snum = 0;
      post_load_processing ();
    }

  dump.pop (n);

  function_depth--;

  timevar_cond_stop (TV_MODULE_IMPORT, timer_running);

  /* The "&"::"[...]" trick selects "::" as separator, or "" (by
     skipping both characters) when NS is the global namespace.  */
  if (!ok)
    fatal_error (input_location,
		 module->is_header ()
		 ? G_("failed to load binding %<%E%s%E%>" )
		 : G_("failed to load binding %<%E%s%E@%s%>" ),
		 ns, &"::" [ns == global_namespace ? 2 : 0], id,
		 module->get_flatname ());

  /* If diagnostics were emitted during the load, tell the user what
     we were doing at the time.  */
  if (count != errorcount + warningcount)
    inform (input_location,
	    module->is_header ()
	    ? G_("during load of binding %<%E%s%E%>" )
	    : G_("during load of binding %<%E%s%E@%s%>" ),
	    ns, &"::" [ns == global_namespace ? 2 : 0], id,
	    module->get_flatname ());
}
| 22559 | |
| 22560 | /* Load any pending entities keyed to the top-key of DECL. */ |
| 22561 | |
void
lazy_load_pendings (tree decl)
{
  /* Make sure lazy loading from a template context behaves as if
     from a non-template context.  */
  processing_template_decl_sentinel ptds;

  /* Compute the namespace/identifier key under which DECL's pending
     entities are filed.  */
  tree key_decl;
  pending_key key;
  key.ns = find_pending_key (decl, decl_p: &key_decl);
  key.id = DECL_NAME (key_decl);

  auto *pending_vec = pending_table ? pending_table->get (k: key) : nullptr;
  if (!pending_vec)
    return;

  /* Remember the diagnostic count, so we can tell whether the loads
     emitted anything.  */
  int count = errorcount + warningcount;

  bool timer_running = timevar_cond_start (TV_MODULE_IMPORT);
  /* Guard against recursive lazy loading.  */
  bool ok = !recursive_lazy ();
  if (ok)
    {
      function_depth++; /* Prevent GC */
      unsigned n = dump.push (NULL);
      dump () && dump ("Reading %u pending entities keyed to %P" ,
		       pending_vec->length (), key.ns, key.id);
      for (unsigned ix = pending_vec->length (); ix--;)
	{
	  unsigned index = (*pending_vec)[ix];
	  binding_slot *slot = &(*entity_ary)[index];

	  if (slot->is_lazy ())
	    {
	      /* Load from the owning module, using the
		 module-relative entity index.  */
	      module_state *import = import_entity_module (index);
	      if (!import->lazy_load (index: index - import->entity_lwm, mslot: slot))
		ok = false;
	    }
	  else if (dump ())
	    {
	      module_state *import = import_entity_module (index);
	      dump () && dump ("Entity %M[%u] already loaded" ,
			       import, index - import->entity_lwm);
	    }
	}

      /* All pendings for this key are now resolved.  */
      pending_table->remove (k: key);
      dump.pop (n);
      lazy_snum = 0;
      post_load_processing ();
      function_depth--;
    }

  timevar_cond_stop (TV_MODULE_IMPORT, timer_running);

  /* Indexing into "::" skips the separator entirely when KEY.NS is
     the global namespace.  */
  if (!ok)
    fatal_error (input_location, "failed to load pendings for %<%E%s%E%>" ,
		 key.ns, &"::" [key.ns == global_namespace ? 2 : 0], key.id);

  if (count != errorcount + warningcount)
    inform (input_location, "during load of pendings for %<%E%s%E%>" ,
	    key.ns, &"::" [key.ns == global_namespace ? 2 : 0], key.id);
}
| 22624 | |
| 22625 | static void |
| 22626 | direct_import (module_state *import, cpp_reader *reader) |
| 22627 | { |
| 22628 | timevar_start (TV_MODULE_IMPORT); |
| 22629 | unsigned n = dump.push (m: import); |
| 22630 | |
| 22631 | gcc_checking_assert (import->is_direct () && import->has_location ()); |
| 22632 | if (import->loadedness == ML_NONE) |
| 22633 | if (!import->do_import (reader, outermost: true)) |
| 22634 | gcc_unreachable (); |
| 22635 | |
| 22636 | this_module ()->set_import (import, is_export: import->exported_p); |
| 22637 | |
| 22638 | if (import->loadedness < ML_LANGUAGE) |
| 22639 | { |
| 22640 | if (!keyed_table) |
| 22641 | keyed_table = new keyed_map_t (EXPERIMENT (1, 400)); |
| 22642 | import->read_language (outermost: true); |
| 22643 | } |
| 22644 | |
| 22645 | dump.pop (n); |
| 22646 | timevar_stop (TV_MODULE_IMPORT); |
| 22647 | } |
| 22648 | |
| 22649 | /* Import module IMPORT. */ |
| 22650 | |
void
import_module (module_state *import, location_t from_loc, bool exporting_p,
	       tree, cpp_reader *reader)
{
  /* A non-partition implementation unit has no name.  */
  if (!this_module ()->name && this_module ()->parent == import)
    {
      auto_diagnostic_group d;
      error_at (from_loc, "import of %qs within its own implementation unit" ,
		import->get_flatname());
      inform (import->loc, "module declared here" );
      return;
    }

  if (!import->check_circular_import (from: from_loc))
    return;

  if (!import->is_header () && current_lang_depth ())
    /* Only header units should appear inside language
       specifications.  The std doesn't specify this, but I think
       that's an error in resolving US 033, because language linkage
       is also our escape clause to getting things into the global
       module, so we don't want to confuse things by having to think
       about whether 'extern "C++" { import foo; }' puts foo's
       contents into the global module all of a sudden.  */
    warning (0, "import of named module %qs inside language-linkage block" ,
	     import->get_flatname ());

  /* An export-import, or an import within an exported region,
     re-exports IMPORT.  */
  if (exporting_p || module_exporting_p ())
    import->exported_p = true;

  if (import->loadedness != ML_NONE)
    {
      /* Already loaded: just reparent its location subtree to this
	 import point.  */
      from_loc = ordinary_loc_of (lmaps: line_table, from: from_loc);
      linemap_module_reparent (line_table, loc: import->loc, new_parent: from_loc);
    }

  gcc_checking_assert (import->is_direct () && import->has_location ());

  direct_import (import, reader);
}
| 22692 | |
| 22693 | /* Declare the name of the current module to be NAME. EXPORTING_p is |
| 22694 | true if this TU is the exporting module unit. */ |
| 22695 | |
void
declare_module (module_state *module, location_t from_loc, bool exporting_p,
		tree, cpp_reader *reader)
{
  gcc_assert (global_namespace == current_scope ());

  module_state *current = this_module ();
  /* Diagnose a second module-declaration, or declaring a module we
     have already imported.  */
  if (module_purview_p () || module->loadedness > ML_CONFIG)
    {
      auto_diagnostic_group d;
      error_at (from_loc, module_purview_p ()
		? G_("module already declared" )
		: G_("module already imported" ));
      if (module_purview_p ())
	module = current;
      inform (module->loc, module_purview_p ()
	      ? G_("module %qs declared here" )
	      : G_("module %qs imported here" ),
	      module->get_flatname ());
      return;
    }

  gcc_checking_assert (module->is_module ());
  gcc_checking_assert (module->is_direct () && module->has_location ());

  /* Yer a module, 'arry. */
  module_kind = module->is_header () ? MK_HEADER : MK_NAMED | MK_ATTACH;

  // Even in header units, we consider the decls to be purview
  module_kind |= MK_PURVIEW;

  if (module->is_partition ())
    module_kind |= MK_PARTITION;
  if (exporting_p)
    {
      module->interface_p = true;
      module_kind |= MK_INTERFACE;
    }

  if (module_has_cmi_p ())
    {
      /* Copy the importing information we may have already done.  We
	 do not need to separate out the imports that only happen in
	 the GMF, inspite of what the literal wording of the std
	 might imply.  See p2191, the core list had a discussion
	 where the module implementors agreed that the GMF of a named
	 module is invisible to importers.  */
      module->imports = current->imports;

      /* This TU now IS the module: move it into slot zero.  */
      module->mod = 0;
      (*modules)[0] = module;
    }
  else
    {
      /* Not emitting a CMI ourselves, so this is presumably an
	 implementation unit: import the module's interface.  */
      module->interface_p = true;
      current->parent = module; /* So mangler knows module identity. */
      direct_import (import: module, reader);
    }
}
| 22755 | |
| 22756 | /* Return true IFF we must emit a module global initializer function |
| 22757 | (which will be called by importers' init code). */ |
| 22758 | |
| 22759 | bool |
| 22760 | module_global_init_needed () |
| 22761 | { |
| 22762 | return module_has_cmi_p () && !header_module_p (); |
| 22763 | } |
| 22764 | |
| 22765 | /* Calculate which, if any, import initializers need calling. */ |
| 22766 | |
| 22767 | bool |
| 22768 | module_determine_import_inits () |
| 22769 | { |
| 22770 | if (!modules || header_module_p ()) |
| 22771 | return false; |
| 22772 | |
| 22773 | /* Prune active_init_p. We need the same bitmap allocation |
| 22774 | scheme as for the imports member. */ |
| 22775 | function_depth++; /* Disable GC. */ |
| 22776 | bitmap covered_imports (BITMAP_GGC_ALLOC ()); |
| 22777 | |
| 22778 | bool any = false; |
| 22779 | |
| 22780 | /* Because indirect imports are before their direct import, and |
| 22781 | we're scanning the array backwards, we only need one pass! */ |
| 22782 | for (unsigned ix = modules->length (); --ix;) |
| 22783 | { |
| 22784 | module_state *import = (*modules)[ix]; |
| 22785 | |
| 22786 | if (!import->active_init_p) |
| 22787 | ; |
| 22788 | else if (bitmap_bit_p (covered_imports, ix)) |
| 22789 | import->active_init_p = false; |
| 22790 | else |
| 22791 | { |
| 22792 | /* Everything this imports is therefore handled by its |
| 22793 | initializer, so doesn't need initializing by us. */ |
| 22794 | bitmap_ior_into (covered_imports, import->imports); |
| 22795 | any = true; |
| 22796 | } |
| 22797 | } |
| 22798 | function_depth--; |
| 22799 | |
| 22800 | return any; |
| 22801 | } |
| 22802 | |
| 22803 | /* Emit calls to each direct import's global initializer. Including |
| 22804 | direct imports of directly imported header units. The initializers |
| 22805 | of (static) entities in header units will be called by their |
| 22806 | importing modules (for the instance contained within that), or by |
| 22807 | the current TU (for the instances we've brought in). Of course |
| 22808 | such header unit behaviour is evil, but iostream went through that |
| 22809 | door some time ago. */ |
| 22810 | |
void
module_add_import_initializers ()
{
  if (!modules || header_module_p ())
    return;

  /* Import initializers are 'void fn ()' functions.  */
  tree fntype = build_function_type (void_type_node, void_list_node);
  releasing_vec args; // There are no args

  /* Slot zero is the current TU; skip it.  */
  for (unsigned ix = modules->length (); --ix;)
    {
      module_state *import = (*modules)[ix];
      if (import->active_init_p)
	{
	  /* Declare the import's global initializer under its mangled
	     name, and emit a call to it.  */
	  tree name = mangle_module_global_init (ix);
	  tree fndecl = build_lang_decl (FUNCTION_DECL, name, fntype);

	  DECL_CONTEXT (fndecl) = FROB_CONTEXT (global_namespace);
	  SET_DECL_ASSEMBLER_NAME (fndecl, name);
	  TREE_PUBLIC (fndecl) = true;
	  determine_visibility (fndecl);

	  tree call = cp_build_function_call_vec (fndecl, &args,
						  tf_warning_or_error);
	  finish_expr_stmt (call);
	}
    }
}
| 22839 | |
| 22840 | /* NAME & LEN are a preprocessed header name, possibly including the |
| 22841 | surrounding "" or <> characters. Return the raw string name of the |
| 22842 | module to which it refers. This will be an absolute path, or begin |
| 22843 | with ./, so it is immediately distinguishable from a (non-header |
| 22844 | unit) module name. If READER is non-null, ask the preprocessor to |
| 22845 | locate the header to which it refers using the appropriate include |
| 22846 | path. Note that we do never do \ processing of the string, as that |
| 22847 | matches the preprocessor's behaviour. */ |
| 22848 | |
| 22849 | static const char * |
| 22850 | (cpp_reader *reader, location_t loc, bool unquoted, |
| 22851 | const char *str, size_t &len_r) |
| 22852 | { |
| 22853 | size_t len = len_r; |
| 22854 | static char *buf = 0; |
| 22855 | static size_t alloc = 0; |
| 22856 | |
| 22857 | if (!unquoted) |
| 22858 | { |
| 22859 | gcc_checking_assert (len >= 2 |
| 22860 | && ((reader && str[0] == '<' && str[len-1] == '>') |
| 22861 | || (str[0] == '"' && str[len-1] == '"'))); |
| 22862 | str += 1; |
| 22863 | len -= 2; |
| 22864 | } |
| 22865 | |
| 22866 | if (reader) |
| 22867 | { |
| 22868 | gcc_assert (!unquoted); |
| 22869 | |
| 22870 | if (len >= alloc) |
| 22871 | { |
| 22872 | alloc = len + 1; |
| 22873 | buf = XRESIZEVEC (char, buf, alloc); |
| 22874 | } |
| 22875 | memcpy (dest: buf, src: str, n: len); |
| 22876 | buf[len] = 0; |
| 22877 | |
| 22878 | if (const char *hdr |
| 22879 | = cpp_probe_header_unit (reader, file: buf, angle_p: str[-1] == '<', loc)) |
| 22880 | { |
| 22881 | len = strlen (s: hdr); |
| 22882 | str = hdr; |
| 22883 | } |
| 22884 | else |
| 22885 | str = buf; |
| 22886 | } |
| 22887 | |
| 22888 | if (!(str[0] == '.' ? IS_DIR_SEPARATOR (str[1]) : IS_ABSOLUTE_PATH (str))) |
| 22889 | { |
| 22890 | /* Prepend './' */ |
| 22891 | if (len + 3 > alloc) |
| 22892 | { |
| 22893 | alloc = len + 3; |
| 22894 | buf = XRESIZEVEC (char, buf, alloc); |
| 22895 | } |
| 22896 | |
| 22897 | buf[0] = '.'; |
| 22898 | buf[1] = DIR_SEPARATOR; |
| 22899 | memmove (dest: buf + 2, src: str, n: len); |
| 22900 | len += 2; |
| 22901 | buf[len] = 0; |
| 22902 | str = buf; |
| 22903 | } |
| 22904 | |
| 22905 | len_r = len; |
| 22906 | return str; |
| 22907 | } |
| 22908 | |
| 22909 | /* Set the CMI name from a cody packet. Issue an error if |
| 22910 | ill-formed. */ |
| 22911 | |
| 22912 | void module_state::set_filename (const Cody::Packet &packet) |
| 22913 | { |
| 22914 | if (packet.GetCode () == Cody::Client::PC_PATHNAME) |
| 22915 | { |
| 22916 | /* If we've seen this import before we better have the same CMI. */ |
| 22917 | const std::string &path = packet.GetString (); |
| 22918 | if (!filename) |
| 22919 | filename = xstrdup (packet.GetString ().c_str ()); |
| 22920 | else if (filename != path) |
| 22921 | error_at (loc, "mismatching compiled module interface: " |
| 22922 | "had %qs, got %qs" , filename, path.c_str ()); |
| 22923 | } |
| 22924 | else |
| 22925 | { |
| 22926 | gcc_checking_assert (packet.GetCode () == Cody::Client::PC_ERROR); |
| 22927 | fatal_error (loc, "unknown compiled module interface: %s" , |
| 22928 | packet.GetString ().c_str ()); |
| 22929 | } |
| 22930 | } |
| 22931 | |
| 22932 | /* The list of importable headers from C++ Table 24. */ |
| 22933 | |
/* NOTE: the array identifier had been lost in this copy of the
   source; it is restored here to match its users (is_importable_header
   uses ARRAY_SIZE and strcmp on it).  The table must stay sorted, as
   it is binary-searched.  */
static const char *
importable_headers[] =
{
  "algorithm" , "any" , "array" , "atomic" ,
  "barrier" , "bit" , "bitset" ,
  "charconv" , "chrono" , "compare" , "complex" , "concepts" ,
  "condition_variable" , "contracts" , "coroutine" ,
  "debugging" , "deque" ,
  "exception" , "execution" , "expected" ,
  "filesystem" , "flat_map" , "flat_set" , "format" , "forward_list" ,
  "fstream" , "functional" , "future" ,
  "generator" ,
  "hazard_pointer" , "hive" ,
  "initializer_list" , "inplace_vector" , "iomanip" , "ios" , "iosfwd" ,
  "iostream" , "istream" , "iterator" ,
  "latch" , "limits" , "linalg" , "list" , "locale" ,
  "map" , "mdspan" , "memory" , "memory_resource" , "meta" , "mutex" ,
  "new" , "numbers" , "numeric" ,
  "optional" , "ostream" ,
  "print" ,
  "queue" ,
  "random" , "ranges" , "ratio" , "rcu" , "regex" ,
  "scoped_allocator" , "semaphore" , "set" , "shared_mutex" , "simd" ,
  "source_location" , "span" , "spanstream" , "sstream" , "stack" , "stacktrace" ,
  "stdexcept" , "stdfloat" , "stop_token" , "streambuf" , "string" ,
  "string_view" , "syncstream" , "system_error" ,
  "text_encoding" , "thread" , "tuple" , "type_traits" , "typeindex" , "typeinfo" ,
  "unordered_map" , "unordered_set" ,
  "utility" ,
  "valarray" , "variant" , "vector" , "version"
};
| 22965 | |
| 22966 | /* True iff <name> is listed as an importable standard header. */ |
| 22967 | |
| 22968 | static bool |
| 22969 | (const char *name) |
| 22970 | { |
| 22971 | unsigned lo = 0; |
| 22972 | unsigned hi = ARRAY_SIZE (importable_headers); |
| 22973 | while (hi > lo) |
| 22974 | { |
| 22975 | unsigned mid = (lo + hi)/2; |
| 22976 | int cmp = strcmp (s1: name, s2: importable_headers[mid]); |
| 22977 | if (cmp > 0) |
| 22978 | lo = mid + 1; |
| 22979 | else if (cmp < 0) |
| 22980 | hi = mid; |
| 22981 | else |
| 22982 | return true; |
| 22983 | } |
| 22984 | return false; |
| 22985 | } |
| 22986 | |
| 22987 | /* Figure out whether to treat HEADER as an include or an import. */ |
| 22988 | |
static char *
maybe_translate_include (cpp_reader *reader, line_maps *lmaps, location_t loc,
			 _cpp_file *file, bool angle, const char **alternate)
{
  if (!modules_p ())
    {
      /* Turn off. */
      cpp_get_callbacks (reader)->translate_include = NULL;
      return nullptr;
    }

  const char *path = _cpp_get_file_path (file);

  dump.push (NULL);

  dump () && dump ("Checking include translation '%s'" , path);
  auto *mapper = get_mapper (loc: cpp_main_loc (reader), deps: cpp_get_deps (reader));

  /* Ask the module mapper whether this header should be translated
     to an import.  */
  size_t len = strlen (s: path);
  path = canonicalize_header_name (NULL, loc, unquoted: true, str: path, len_r&: len);
  auto packet = mapper->IncludeTranslate (str: path, flags: Cody::Flags::None, len);

  enum class xlate_kind {
    unknown, text, import, invalid
  } translate = xlate_kind::unknown;

  if (packet.GetCode () == Cody::Client::PC_BOOL)
    translate = packet.GetInteger () ? xlate_kind::text : xlate_kind::unknown;
  else if (packet.GetCode () == Cody::Client::PC_PATHNAME)
    {
      /* Record the CMI name for when we do the import.
	 We may already know about this import, but libcpp doesn't yet. */
      module_state *import = get_module (name: build_string (len, path));
      import->set_filename (packet);
      if (import->check_importable (reader))
	translate = xlate_kind::import;
      else
	translate = xlate_kind::invalid;
    }
  else
    {
      gcc_checking_assert (packet.GetCode () == Cody::Client::PC_ERROR);
      error_at (loc, "cannot determine %<#include%> translation of %s: %s" ,
		path, packet.GetString ().c_str ());
    }

  /* Decide whether to tell the user about the decision.  */
  bool note = (translate == xlate_kind::invalid);
  if (note_include_translate_yes && translate == xlate_kind::import)
    note = true;
  else if (note_include_translate_no && translate == xlate_kind::unknown)
    note = true;
  else if (note_includes)
    /* We do not expect the note_includes vector to be large, so O(N)
       iteration. */
    for (unsigned ix = note_includes->length (); !note && ix--;)
      if (!strcmp (s1: (*note_includes)[ix], s2: path))
	note = true;

  /* Maybe try importing a different header instead. */
  if (alternate && translate == xlate_kind::unknown)
    {
      const char *fname = _cpp_get_file_name (file);
      /* Redirect importable <name> to <bits/stdc++.h>. */
      /* ??? Generalize to use a .json. */
      expanded_location eloc = expand_location (loc);
      if (angle && is_importable_header (name: fname)
	  /* Exclude <version> which often goes with import std. */
	  && strcmp (s1: fname, s2: "version" ) != 0
	  /* Don't redirect #includes between headers under the same include
	     path directory (i.e. between library headers); if the import
	     brings in the current file we then get redefinition errors. */
	  && !strstr (haystack: eloc.file, needle: _cpp_get_file_dir (file)->name)
	  /* ??? These are needed when running a toolchain from the build
	     directory, because libsupc++ headers aren't linked into
	     libstdc++-v3/include with the other headers. */
	  && !strstr (haystack: eloc.file, needle: "libstdc++-v3/include" )
	  && !strstr (haystack: eloc.file, needle: "libsupc++" ))
	*alternate = "bits/stdc++.h" ;
    }

  if (note)
    inform (loc, translate == xlate_kind::import
	    ? G_("include %qs translated to import" )
	    : translate == xlate_kind::invalid
	    ? G_("import of %qs failed, falling back to include" )
	    : G_("include %qs processed textually" ), path);

  dump () && dump (translate == xlate_kind::import
		   ? "Translating include to import"
		   : "Keeping include as include" );
  dump.pop (n: 0);

  if (translate != xlate_kind::import)
    return nullptr;

  /* Create the translation text. */
  loc = ordinary_loc_of (lmaps, from: loc);
  const line_map_ordinary *map
    = linemap_check_ordinary (map: linemap_lookup (lmaps, loc));
  unsigned col = SOURCE_COLUMN (ord_map: map, loc);
  col -= (col != 0); /* Columns are 1-based. */

  unsigned alloc = len + col + 60;
  char *res = XNEWVEC (char, alloc);

  strcpy (dest: res, src: "__import" );
  unsigned actual = 8;
  if (col > actual)
    {
      /* Pad out so the filename appears at the same position. */
      memset (s: res + actual, c: ' ', n: col - actual);
      actual = col;
    }
  /* No need to encode characters, that's not how header names are
     handled. */
  actual += snprintf (s: res + actual, maxlen: alloc - actual,
		      format: "\"%s\" [[__translated]];\n" , path);
  gcc_checking_assert (actual < alloc);

  /* cpplib will delete the buffer. */
  return res;
}
| 23111 | |
| 23112 | static void |
| 23113 | (cpp_reader *reader) |
| 23114 | { |
| 23115 | /* Set the module header name from the main_input_filename. */ |
| 23116 | const char *main = main_input_filename; |
| 23117 | size_t len = strlen (s: main); |
| 23118 | main = canonicalize_header_name (NULL, loc: 0, unquoted: true, str: main, len_r&: len); |
| 23119 | module_state *module = get_module (name: build_string (len, main)); |
| 23120 | |
| 23121 | preprocess_module (module, cpp_main_loc (reader), in_purview: false, is_import: false, export_p: true, reader); |
| 23122 | } |
| 23123 | |
| 23124 | /* We've just properly entered the main source file. I.e. after the |
| 23125 | command line, builtins and forced headers. Record the line map and |
| 23126 | location of this map. Note we may be called more than once. The |
| 23127 | first call sticks. */ |
| 23128 | |
| 23129 | void |
| 23130 | module_begin_main_file (cpp_reader *reader, line_maps *lmaps, |
| 23131 | const line_map_ordinary *map) |
| 23132 | { |
| 23133 | gcc_checking_assert (lmaps == line_table); |
| 23134 | if (modules_p () && !spans.init_p ()) |
| 23135 | { |
| 23136 | unsigned n = dump.push (NULL); |
| 23137 | spans.init (lmaps, map); |
| 23138 | dump.pop (n); |
| 23139 | if (flag_header_unit && !cpp_get_options (reader)->preprocessed) |
| 23140 | { |
| 23141 | /* Tell the preprocessor this is an include file. */ |
| 23142 | cpp_retrofit_as_include (reader); |
| 23143 | begin_header_unit (reader); |
| 23144 | } |
| 23145 | } |
| 23146 | } |
| 23147 | |
| 23148 | /* Process the pending_import queue, making sure we know the |
| 23149 | filenames. */ |
| 23150 | |
static void
name_pending_imports (cpp_reader *reader)
{
  auto *mapper = get_mapper (loc: cpp_main_loc (reader), deps: cpp_get_deps (reader));

  if (!vec_safe_length (v: pending_imports))
    /* Not doing anything. */
    return;

  timevar_start (TV_MODULE_MAPPER);

  auto n = dump.push (NULL);
  dump () && dump ("Resolving direct import names" );
  bool want_deps = (bool (mapper->get_flags () & Cody::Flags::NameOnly)
		    || cpp_get_deps (reader));
  bool any = false;

  /* First pass: batch all unresolved names into one corked mapper
     transaction.  VISITED_P marks modules awaiting a response.  */
  for (unsigned ix = 0; ix != pending_imports->length (); ix++)
    {
      module_state *module = (*pending_imports)[ix];
      gcc_checking_assert (module->is_direct ());
      if (!module->filename && !module->visited_p)
	{
	  bool export_p = (module->is_module ()
			   && (module->is_partition ()
			       || module->is_exported ()));

	  Cody::Flags flags = Cody::Flags::None;
	  if (flag_preprocess_only
	      && !(module->is_header () && !export_p))
	    {
	      /* When only preprocessing, we just need names for
		 dependency output, if anything.  */
	      if (!want_deps)
		continue;
	      flags = Cody::Flags::NameOnly;
	    }

	  if (!any)
	    {
	      any = true;
	      mapper->Cork ();
	    }
	  if (export_p)
	    mapper->ModuleExport (str: module->get_flatname (), flags);
	  else
	    mapper->ModuleImport (str: module->get_flatname (), flags);
	  module->visited_p = true;
	}
    }

  if (any)
    {
      /* Second pass: uncork and hand each response packet to the
	 module it belongs to, in the same order we asked.  */
      auto response = mapper->Uncork ();
      auto r_iter = response.begin ();
      for (unsigned ix = 0; ix != pending_imports->length (); ix++)
	{
	  module_state *module = (*pending_imports)[ix];
	  if (module->visited_p)
	    {
	      module->visited_p = false;
	      gcc_checking_assert (!module->filename);

	      module->set_filename (*r_iter);
	      ++r_iter;
	    }
	}
    }

  dump.pop (n);

  timevar_stop (TV_MODULE_MAPPER);
}
| 23222 | |
| 23223 | /* We've just lexed a module-specific control line for MODULE. Mark |
| 23224 | the module as a direct import, and possibly load up its macro |
| 23225 | state. Returns the primary module, if this is a module |
| 23226 | declaration. */ |
| 23227 | /* Perhaps we should offer a preprocessing mode where we read the |
| 23228 | directives from the header unit, rather than require the header's |
| 23229 | CMI. */ |
| 23230 | |
module_state *
preprocess_module (module_state *module, location_t from_loc,
		   bool in_purview, bool is_import, bool is_export,
		   cpp_reader *reader)
{
  if (!is_import)
    {
      /* This is a module-declaration (not an import).  */
      if (in_purview || module->loc)
	{
	  /* We've already seen a module declaration.  If only preprocessing
	     then we won't complain in declare_module, so complain here.  */
	  if (flag_preprocess_only)
	    error_at (from_loc,
		      in_purview
		      ? G_("module already declared" )
		      : G_("module already imported" ));
	  /* Always pretend this was an import to aid error recovery.  */
	  is_import = true;
	}
      else
	{
	  /* Record it is the module.  */
	  module->module_p = true;
	  if (is_export)
	    {
	      module->exported_p = true;
	      module->interface_p = true;
	    }
	}
    }

  /* Upgrade the directness: MD_DIRECT for imports outside the
     purview, MD_DIRECT + 1 (MD_PURVIEW_DIRECT) inside it.  */
  if (module->directness < MD_DIRECT + in_purview)
    {
      /* Mark as a direct import.  */
      module->directness = module_directness (MD_DIRECT + in_purview);

      /* Set the location to be most informative for users.  */
      from_loc = ordinary_loc_of (lmaps: line_table, from: from_loc);
      if (module->loadedness != ML_NONE)
	linemap_module_reparent (line_table, loc: module->loc, new_parent: from_loc);
      else
	{
	  /* Don't overwrite the location if we're importing ourselves
	     after already having seen a module-declaration.  */
	  if (!(is_import && module->is_module ()))
	    module->loc = from_loc;
	  if (!module->flatname)
	    module->set_flatname ();
	}
    }

  /* Decide how much of the module we need to load right now.  Header
     unit imports need their macro state during phase 4 unless we're
     consuming already-preprocessed input.  */
  auto desired = ML_CONFIG;
  if (is_import
      && module->is_header ()
      && (!cpp_get_options (reader)->preprocessed
	  || cpp_get_options (reader)->directives_only))
    /* We need preprocessor state now.  */
    desired = ML_PREPROCESSOR;

  if (!is_import || module->loadedness < desired)
    {
      vec_safe_push (v&: pending_imports, obj: module);

      if (desired == ML_PREPROCESSOR)
	{
	  unsigned n = dump.push (NULL);

	  dump () && dump ("Reading %M preprocessor state" , module);
	  /* Resolve filenames for everything queued so far, so the
	     loads below know where to read from.  */
	  name_pending_imports (reader);

	  /* Preserve the state of the line-map.  */
	  auto pre_hwm = LINEMAPS_ORDINARY_USED (set: line_table);

	  /* We only need to close the span, if we're going to emit a
	     CMI.  But that's a little tricky -- our token scanner
	     needs to be smarter -- and this isn't much state.
	     Remember, we've not parsed anything at this point, so
	     our module state flags are inadequate.  */
	  spans.maybe_init ();
	  spans.close ();

	  timevar_start (TV_MODULE_IMPORT);

	  /* Load the config of each pending import -- we must assign
	     module numbers monotonically.  */
	  for (unsigned ix = 0; ix != pending_imports->length (); ix++)
	    {
	      auto *import = (*pending_imports)[ix];
	      /* Skip the module interface/partition we are ourselves
		 declaring, anything already loaded, and named modules
		 when only preprocessing.  */
	      if (!(import->is_module ()
		    && (import->is_partition () || import->is_exported ()))
		  && import->loadedness == ML_NONE
		  && (import->is_header () || !flag_preprocess_only))
		{
		  unsigned n = dump.push (m: import);
		  import->do_import (reader, outermost: true);
		  dump.pop (n);
		}
	    }
	  vec_free (v&: pending_imports);

	  /* Restore the line-map state.  */
	  spans.open (hwm: linemap_module_restore (line_table, lwm: pre_hwm));

	  /* Now read the preprocessor state of this particular
	     import.  */
	  if (module->loadedness == ML_CONFIG
	      && module->read_preprocessor (outermost: true))
	    module->import_macros ();

	  timevar_stop (TV_MODULE_IMPORT);

	  dump.pop (n);
	}
    }

  /* For a module-declaration, hand back the primary interface so the
     caller can attach to it; imports return NULL.  */
  return is_import ? NULL : get_primary (parent: module);
}
| 23348 | |
| 23349 | /* We've completed phase-4 translation. Emit any dependency |
| 23350 | information for the not-yet-loaded direct imports, and fill in |
| 23351 | their file names. We'll have already loaded up the direct header |
| 23352 | unit wavefront. */ |
| 23353 | |
void
preprocessed_module (cpp_reader *reader)
{
  unsigned n = dump.push (NULL);

  dump () && dump ("Completed phase-4 (tokenization) processing" );

  /* Resolve any remaining import filenames; afterwards the pending
     queue is no longer needed.  */
  name_pending_imports (reader);
  vec_free (v&: pending_imports);

  spans.maybe_init ();
  spans.close ();

  using iterator = hash_table<module_state_hash>::iterator;
  if (mkdeps *deps = cpp_get_deps (reader))
    {
      /* Walk the module hash, informing the dependency machinery.  */
      iterator end = modules_hash->end ();
      for (iterator iter = modules_hash->begin (); iter != end; ++iter)
	{
	  module_state *module = *iter;

	  if (module->is_direct ())
	    {
	      /* Interfaces and partitions produce a CMI, so they are
		 targets; everything else is a plain dependency.  */
	      if (module->is_module ()
		  && (module->is_interface () || module->is_partition ()))
		deps_add_module_target (deps, module: module->get_flatname (),
					cmi: maybe_add_cmi_prefix (to: module->filename),
					is_header: module->is_header (),
					is_exported: module->is_exported ());
	      else
		deps_add_module_dep (deps, module: module->get_flatname ());
	    }
	}
    }

  if (flag_header_unit && !flag_preprocess_only)
    {
      /* Find the main module -- remember, it's not yet in the module
	 array.  */
      iterator end = modules_hash->end ();
      for (iterator iter = modules_hash->begin (); iter != end; ++iter)
	{
	  module_state *module = *iter;
	  if (module->is_module ())
	    {
	      declare_module (module, from_loc: cpp_main_loc (reader), exporting_p: true, NULL, reader);
	      module_kind |= MK_EXPORTING;
	      break;
	    }
	}
    }

  dump.pop (n);
}
| 23409 | |
| 23410 | /* VAL is a global tree, add it to the global vec if it is |
| 23411 | interesting. Add some of its targets, if they too are |
| 23412 | interesting. We do not add identifiers, as they can be re-found |
| 23413 | via the identifier hash table. There is a cost to the number of |
| 23414 | global trees. */ |
| 23415 | |
| 23416 | static int |
| 23417 | maybe_add_global (tree val, unsigned &crc) |
| 23418 | { |
| 23419 | int v = 0; |
| 23420 | |
| 23421 | if (val && !(identifier_p (t: val) || TREE_VISITED (val))) |
| 23422 | { |
| 23423 | TREE_VISITED (val) = true; |
| 23424 | crc = crc32_unsigned (chksum: crc, value: fixed_trees->length ()); |
| 23425 | vec_safe_push (v&: fixed_trees, obj: val); |
| 23426 | v++; |
| 23427 | |
| 23428 | if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPED)) |
| 23429 | v += maybe_add_global (TREE_TYPE (val), crc); |
| 23430 | if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPE_COMMON)) |
| 23431 | v += maybe_add_global (TYPE_NAME (val), crc); |
| 23432 | } |
| 23433 | |
| 23434 | return v; |
| 23435 | } |
| 23436 | |
| 23437 | /* Initialize module state. Create the hash table, determine the |
| 23438 | global trees. Create the module for current TU. */ |
| 23439 | |
void
init_modules (cpp_reader *reader)
{
  /* PCH should not be reachable because of lang-specs, but the
     user could have overriden that.  */
  if (pch_file)
    fatal_error (input_location,
		 "C++ modules are incompatible with precompiled headers" );

  if (cpp_get_options (reader)->traditional)
    fatal_error (input_location,
		 "C++ modules are incompatible with traditional preprocessing" );

  /* :: is always exported.  */
  DECL_MODULE_EXPORT_P (global_namespace) = true;

  modules_hash = hash_table<module_state_hash>::create_ggc (n: 31);
  vec_safe_reserve (v&: modules, nelems: 20);

  /* Create module for current TU.  It always occupies slot zero of
     the modules array, whether or not we are a module.  */
  module_state *current
    = new (ggc_alloc<module_state> ()) module_state (NULL_TREE, NULL, false);
  current->mod = 0;
  bitmap_set_bit (current->imports, 0);
  modules->quick_push (obj: current);

  gcc_checking_assert (!fixed_trees);

  headers = BITMAP_GGC_ALLOC ();

  if (note_includes)
    /* Canonicalize header names.  */
    for (unsigned ix = 0; ix != note_includes->length (); ix++)
      {
	const char *hdr = (*note_includes)[ix];
	size_t len = strlen (s: hdr);

	/* Determine whether the name is <>-delimited, ""-delimited,
	   or bare.  */
	bool system = hdr[0] == '<';
	bool user = hdr[0] == '"';
	bool delimed = system || user;

	if (len <= (delimed ? 2 : 0)
	    || (delimed && hdr[len-1] != (system ? '>' : '"')))
	  error ("invalid header name %qs" , hdr);

	hdr = canonicalize_header_name (reader: delimed ? reader : NULL,
					loc: 0, unquoted: !delimed, str: hdr, len_r&: len);
	/* Keep a NUL-terminated private copy, replacing the original
	   entry.  */
	char *path = XNEWVEC (char, len + 1);
	memcpy (dest: path, src: hdr, n: len);
	path[len] = 0;

	(*note_includes)[ix] = path;
      }

  if (note_cmis)
    /* Canonicalize & mark module names.  */
    for (unsigned ix = 0; ix != note_cmis->length (); ix++)
      {
	const char *name = (*note_cmis)[ix];
	size_t len = strlen (s: name);

	/* A header-unit name is delimited or contains a directory
	   separator; everything else is a named module.  */
	bool is_system = name[0] == '<';
	bool is_user = name[0] == '"';
	bool is_pathname = false;
	if (!(is_system || is_user))
	  for (unsigned ix = len; !is_pathname && ix--;)
	    is_pathname = IS_DIR_SEPARATOR (name[ix]);
	if (is_system || is_user || is_pathname)
	  {
	    if (len <= (is_pathname ? 0 : 2)
		|| (!is_pathname && name[len-1] != (is_system ? '>' : '"')))
	      {
		error ("invalid header name %qs" , name);
		continue;
	      }
	    else
	      name = canonicalize_header_name (reader: is_pathname ? nullptr : reader,
					       loc: 0, unquoted: is_pathname, str: name, len_r&: len);
	  }
	if (auto module = get_module (ptr: name))
	  module->inform_cmi_p = 1;
	else
	  error ("invalid module name %qs" , name);
      }

  dump.push (NULL);

  /* Determine lazy handle bound.  Derived from the open-file
     resource limit, with LAZY_HEADROOM kept in reserve.  */
  {
    unsigned limit = 1000;
#if HAVE_GETRLIMIT
    struct rlimit rlimit;
    if (!getrlimit (RLIMIT_NOFILE, rlimits: &rlimit))
      {
	lazy_hard_limit = (rlimit.rlim_max < 1000000
			   ? unsigned (rlimit.rlim_max) : 1000000);
	lazy_hard_limit = (lazy_hard_limit > LAZY_HEADROOM
			   ? lazy_hard_limit - LAZY_HEADROOM : 0);
	if (rlimit.rlim_cur < limit)
	  limit = unsigned (rlimit.rlim_cur);
      }
#endif
    limit = limit > LAZY_HEADROOM ? limit - LAZY_HEADROOM : 1;

    if (unsigned parm = param_lazy_modules)
      {
	/* Honour the user's --param, raising the soft limit if
	   possible.  */
	if (parm <= limit || !lazy_hard_limit || !try_increase_lazy (want: parm))
	  lazy_limit = parm;
      }
    else
      lazy_limit = limit;
  }

  if (dump ())
    {
      /* Dump a configuration summary: source, versions, and how this
	 compiler itself was built.  */
      verstr_t ver;
      version2string (MODULE_VERSION, out&: ver);
      dump ("Source: %s" , main_input_filename);
      dump ("Compiler: %s" , version_string);
      dump ("Modules: %s" , ver);
      dump ("Checking: %s" ,
#if CHECKING_P
	    "checking"
#elif ENABLE_ASSERT_CHECKING
	    "asserting"
#else
	    "release"
#endif
	    );
      dump ("Compiled by: "
#ifdef __GNUC__
	    "GCC %d.%d, %s" , __GNUC__, __GNUC_MINOR__,
#ifdef __OPTIMIZE__
	    "optimizing"
#else
	    "not optimizing"
#endif
#else
	    "not GCC"
#endif
	    );
      dump ("Reading: %s" , MAPPED_READING ? "mmap" : "fileio" );
      dump ("Writing: %s" , MAPPED_WRITING ? "mmap" : "fileio" );
      dump ("Lazy limit: %u" , lazy_limit);
      dump ("Lazy hard limit: %u" , lazy_hard_limit);
      dump ("" );
    }

  /* Construct the global tree array.  This is an array of unique
     global trees (& types).  Do this now, rather than lazily, as
     some global trees are lazily created and we don't want that to
     mess with our syndrome of fixed trees.  */
  unsigned crc = 0;
  vec_alloc (v&: fixed_trees, nelems: 250);

  dump () && dump ("+Creating globals" );
  /* Insert the TRANSLATION_UNIT_DECL.  */
  TREE_VISITED (DECL_CONTEXT (global_namespace)) = true;
  fixed_trees->quick_push (DECL_CONTEXT (global_namespace));
  for (unsigned jx = 0; global_tree_arys[jx].first; jx++)
    {
      const tree *ptr = global_tree_arys[jx].first;
      unsigned limit = global_tree_arys[jx].second;

      for (unsigned ix = 0; ix != limit; ix++, ptr++)
	{
	  !(ix & 31) && dump ("" ) && dump ("+\t%u:%u:" , jx, ix);
	  unsigned v = maybe_add_global (val: *ptr, crc);
	  dump () && dump ("+%u" , v);
	}
    }
  /* OS- and machine-specific types are dynamically registered at
     runtime, so cannot be part of global_tree_arys.  */
  registered_builtin_types && dump ("" ) && dump ("+\tB:" );
  for (tree t = registered_builtin_types; t; t = TREE_CHAIN (t))
    {
      unsigned v = maybe_add_global (TREE_VALUE (t), crc);
      dump () && dump ("+%u" , v);
    }
  global_crc = crc32_unsigned (chksum: crc, value: fixed_trees->length ());
  dump ("" ) && dump ("Created %u unique globals, crc=%x" ,
		     fixed_trees->length (), global_crc);
  /* Clear the visited marks maybe_add_global set; they are only
     needed for deduplication during construction.  */
  for (unsigned ix = fixed_trees->length (); ix--;)
    TREE_VISITED ((*fixed_trees)[ix]) = false;

  dump.pop (n: 0);

  if (!flag_module_lazy)
    /* Get the mapper now, if we're not being lazy.  */
    get_mapper (loc: cpp_main_loc (reader), deps: cpp_get_deps (reader));

  if (!flag_preprocess_only)
    {
      /* Tables only needed when we actually compile.  */
      pending_table = new pending_map_t (EXPERIMENT (1, 400));
      entity_map = new entity_map_t (EXPERIMENT (1, 400));
      vec_safe_reserve (v&: entity_ary, EXPERIMENT (1, 400));
      imported_temploid_friends
	= decl_tree_cache_map::create_ggc (EXPERIMENT (1, 400));
    }

#if CHECKING_P
  note_defs = note_defs_table_t::create_ggc (n: 1000);
#endif

  if (flag_header_unit && cpp_get_options (reader)->preprocessed)
    begin_header_unit (reader);

  /* Collect here to make sure things are tagged correctly (when
     aggressively GC'd).  */
  ggc_collect ();
}
| 23651 | |
| 23652 | /* If NODE is a deferred macro, load it. */ |
| 23653 | |
| 23654 | static int |
| 23655 | load_macros (cpp_reader *reader, cpp_hashnode *node, void *) |
| 23656 | { |
| 23657 | location_t main_loc |
| 23658 | = MAP_START_LOCATION (map: LINEMAPS_ORDINARY_MAP_AT (set: line_table, index: 0)); |
| 23659 | |
| 23660 | if (cpp_user_macro_p (node) |
| 23661 | && !node->value.macro) |
| 23662 | { |
| 23663 | cpp_macro *macro = cpp_get_deferred_macro (reader, node, main_loc); |
| 23664 | dump () && dump ("Loaded macro #%s %I" , |
| 23665 | macro ? "define" : "undef" , identifier (node)); |
| 23666 | } |
| 23667 | |
| 23668 | return 1; |
| 23669 | } |
| 23670 | |
| 23671 | /* At the end of tokenizing, we no longer need the macro tables of |
| 23672 | imports. But the user might have requested some checking. */ |
| 23673 | |
| 23674 | void |
| 23675 | maybe_check_all_macros (cpp_reader *reader) |
| 23676 | { |
| 23677 | if (!warn_imported_macros) |
| 23678 | return; |
| 23679 | |
| 23680 | /* Force loading of any remaining deferred macros. This will |
| 23681 | produce diagnostics if they are ill-formed. */ |
| 23682 | unsigned n = dump.push (NULL); |
| 23683 | cpp_forall_identifiers (reader, load_macros, NULL); |
| 23684 | dump.pop (n); |
| 23685 | } |
| 23686 | |
| 23687 | // State propagated from finish_module_processing to fini_modules |
| 23688 | |
struct module_processing_cookie
{
  elf_out out;			/* The CMI elf file being written.  */
  module_state_config config;	/* Config to complete in write_end.  */
  char *cmi_name;		/* Final CMI path (owned).  */
  char *tmp_name;		/* Temporary "CMI~" path we write to first
				   (owned).  */
  unsigned crc;			/* Running checksum of the CMI contents.  */
  bool began;			/* Whether write_begin succeeded.  */

  module_processing_cookie (char *cmi, char *tmp, int fd, int e)
    : out (fd, e), cmi_name (cmi), tmp_name (tmp), crc (0), began (false)
  {
  }
  ~module_processing_cookie ()
  {
    /* Takes ownership of the name buffers passed to the ctor.  */
    XDELETEVEC (tmp_name);
    XDELETEVEC (cmi_name);
  }
};
| 23708 | |
| 23709 | /* Write the CMI, if we're a module interface. */ |
| 23710 | |
void *
finish_module_processing (cpp_reader *reader)
{
  module_processing_cookie *cookie = nullptr;

  if (header_module_p ())
    module_kind &= ~MK_EXPORTING;

  if (!modules || !this_module ()->name)
    {
      /* Not a module interface; nothing to write.  */
      if (flag_module_only)
	warning (0, "%<-fmodule-only%> used for non-interface" );
    }
  else if (!flag_syntax_only)
    {
      int fd = -1;
      int e = -1;

      timevar_start (TV_MODULE_EXPORT);

      /* Force a valid but empty line map at the end.  This simplifies
	 the line table preparation and writing logic.  */
      linemap_add (line_table, LC_ENTER, sysp: false, to_file: "" , to_line: 0);

      /* We write to a tmpname, and then atomically rename.  */
      char *cmi_name = NULL;
      char *tmp_name = NULL;
      module_state *state = this_module ();

      unsigned n = dump.push (m: state);
      state->announce (what: "creating" );
      if (state->filename)
	{
	  /* Build the "CMI~" temporary name alongside the real one.  */
	  size_t len = 0;
	  cmi_name = xstrdup (maybe_add_cmi_prefix (to: state->filename, len_p: &len));
	  tmp_name = XNEWVEC (char, len + 3);
	  memcpy (dest: tmp_name, src: cmi_name, n: len);
	  strcpy (dest: &tmp_name[len], src: "~" );

	  if (!errorcount)
	    /* Try the open twice: if it fails with ENOENT, create the
	       missing directories and retry once.  */
	    for (unsigned again = 2; ; again--)
	      {
		fd = open (file: tmp_name,
			   O_RDWR | O_CREAT | O_TRUNC | O_CLOEXEC | O_BINARY,
			   S_IRUSR|S_IWUSR|S_IRGRP|S_IWGRP|S_IROTH|S_IWOTH);
		e = errno;
		if (fd >= 0 || !again || e != ENOENT)
		  break;
		create_dirs (path: tmp_name);
	      }
	  if (note_module_cmi_yes || state->inform_cmi_p)
	    inform (state->loc, "writing CMI %qs" , cmi_name);
	  dump () && dump ("CMI is %s" , cmi_name);
	}

      /* The cookie takes ownership of the name buffers and the fd; it
	 is handed back to fini_modules via our return value.  */
      cookie = new module_processing_cookie (cmi_name, tmp_name, fd, e);

      if (errorcount)
	/* Don't write the module if we have reported errors.  */;
      else if (erroneous_templates
	       && !erroneous_templates->is_empty ())
	{
	  /* Don't write the module if it contains an erroneous template.
	     Also emit notes about where errors occurred in case
	     -Wno-template-body was passed.  */
	  auto_diagnostic_group d;
	  error_at (state->loc, "not writing module %qs due to errors "
		    "in template bodies" , state->get_flatname ());
	  if (!warn_template_body)
	    inform (state->loc, "enable %<-Wtemplate-body%> for more details" );
	  for (auto e : *erroneous_templates)
	    inform (e.second, "first error in %qD appeared here" , e.first);
	}
      else if (cookie->out.begin ())
	{
	  /* So crashes finger-point the module decl.  */
	  iloc_sentinel ils = state->loc;
	  if (state->write_begin (to: &cookie->out, reader, config&: cookie->config,
				  crc&: cookie->crc))
	    cookie->began = true;
	}

      dump.pop (n);
      timevar_stop (TV_MODULE_EXPORT);

      ggc_collect ();
    }

  if (modules)
    {
      /* Report cluster statistics for -fdump-lang-module.  */
      unsigned n = dump.push (NULL);
      dump () && dump ("Imported %u modules" , modules->length () - 1);
      dump () && dump ("Containing %u clusters" , available_clusters);
      dump () && dump ("Loaded %u clusters (%u%%)" , loaded_clusters,
		       (loaded_clusters * 100 + available_clusters / 2) /
		       (available_clusters + !available_clusters));
      dump.pop (n);
    }

  return cookie;
}
| 23812 | |
| 23813 | // Do the final emission of a module. At this point we know whether |
| 23814 | // the module static initializer is a NOP or not. |
| 23815 | |
static void
late_finish_module (cpp_reader *reader, module_processing_cookie *cookie,
		    bool init_fn_non_empty)
{
  timevar_start (TV_MODULE_EXPORT);

  module_state *state = this_module ();
  unsigned n = dump.push (m: state);
  state->announce (what: "finishing" );

  /* Only now do we know whether the module has an active static
     initializer; record that and complete the CMI.  */
  cookie->config.active_init = init_fn_non_empty;
  if (cookie->began)
    state->write_end (to: &cookie->out, reader, config&: cookie->config, crc&: cookie->crc);

  if (cookie->out.end () && cookie->cmi_name)
    {
      /* Some OS's do not replace NEWNAME if it already exists.
	 This'll have a race condition in erroneous concurrent
	 builds.  */
      unlink (name: cookie->cmi_name);
      if (rename (old: cookie->tmp_name, new: cookie->cmi_name))
	{
	  dump () && dump ("Rename ('%s','%s') errno=%u" ,
			   cookie->tmp_name, cookie->cmi_name, errno);
	  cookie->out.set_error (errno);
	}
    }

  if (cookie->out.get_error () && cookie->began)
    {
      error_at (state->loc, "failed to write compiled module: %s" ,
		cookie->out.get_error (name: state->filename));
      state->note_cmi_name ();
    }

  if (!errorcount)
    {
      /* Tell the mapper the CMI is complete, so dependent compiles
	 may proceed.  */
      auto *mapper = get_mapper (loc: cpp_main_loc (reader), deps: cpp_get_deps (reader));
      mapper->ModuleCompiled (str: state->get_flatname ());
    }
  else if (cookie->cmi_name)
    {
      /* We failed, attempt to erase all evidence we even tried.  */
      unlink (name: cookie->tmp_name);
      unlink (name: cookie->cmi_name);
    }

  /* The cookie owns the name buffers; deleting it frees them.  */
  delete cookie;
  dump.pop (n);
  timevar_stop (TV_MODULE_EXPORT);
}
| 23867 | |
void
fini_modules (cpp_reader *reader, void *cookie, bool has_inits)
{
  /* COOKIE, if non-null, was produced by finish_module_processing;
     complete the CMI now we know whether the init fn is empty.  */
  if (cookie)
    late_finish_module (reader,
			cookie: static_cast<module_processing_cookie *> (cookie),
			init_fn_non_empty: has_inits);

  /* We're done with the macro tables now.  */
  vec_free (v&: macro_exports);
  vec_free (v&: macro_imports);
  headers = NULL;

  /* We're now done with everything but the module names.  */
  set_cmi_repo (NULL);
  if (mapper)
    {
      timevar_start (TV_MODULE_MAPPER);
      module_client::close_module_client (loc: 0, mapper);
      mapper = nullptr;
      timevar_stop (TV_MODULE_MAPPER);
    }
  module_state_config::release ();

#if CHECKING_P
  note_defs = NULL;
#endif

  /* Release every imported module's state, keeping slot zero (the
     current TU) alive.  */
  if (modules)
    for (unsigned ix = modules->length (); --ix;)
      if (module_state *state = (*modules)[ix])
	state->release ();

  /* No need to lookup modules anymore.  */
  modules_hash = NULL;

  /* Or entity array.  We still need the entity map to find import numbers.  */
  vec_free (v&: entity_ary);
  entity_ary = NULL;

  /* Or remember any pending entities.  */
  delete pending_table;
  pending_table = NULL;

  /* Or any keys -- Let it go!  */
  delete keyed_table;
  keyed_table = NULL;

  /* Allow a GC, we've possibly made much data unreachable.  */
  ggc_collect ();
}
| 23919 | |
| 23920 | /* If CODE is a module option, handle it & return true. Otherwise |
| 23921 | return false. For unknown reasons I cannot get the option |
| 23922 | generation machinery to set fmodule-mapper or -fmodule-header to |
| 23923 | make a string type option variable. */ |
| 23924 | |
| 23925 | bool |
| 23926 | handle_module_option (unsigned code, const char *str, int) |
| 23927 | { |
| 23928 | auto hdr = CMS_header; |
| 23929 | |
| 23930 | switch (opt_code (code)) |
| 23931 | { |
| 23932 | case OPT_fmodule_mapper_: |
| 23933 | module_mapper_name = str; |
| 23934 | return true; |
| 23935 | |
| 23936 | case OPT_fmodule_header_: |
| 23937 | { |
| 23938 | if (!strcmp (s1: str, s2: "user" )) |
| 23939 | hdr = CMS_user; |
| 23940 | else if (!strcmp (s1: str, s2: "system" )) |
| 23941 | hdr = CMS_system; |
| 23942 | else |
| 23943 | error ("unknown header kind %qs" , str); |
| 23944 | } |
| 23945 | /* Fallthrough. */ |
| 23946 | |
| 23947 | case OPT_fmodule_header: |
| 23948 | flag_header_unit = hdr; |
| 23949 | flag_modules = 1; |
| 23950 | return true; |
| 23951 | |
| 23952 | case OPT_flang_info_include_translate_: |
| 23953 | vec_safe_push (v&: note_includes, obj: str); |
| 23954 | return true; |
| 23955 | |
| 23956 | case OPT_flang_info_module_cmi_: |
| 23957 | vec_safe_push (v&: note_cmis, obj: str); |
| 23958 | return true; |
| 23959 | |
| 23960 | default: |
| 23961 | return false; |
| 23962 | } |
| 23963 | } |
| 23964 | |
| 23965 | /* Set preprocessor callbacks and options for modules. */ |
| 23966 | |
| 23967 | void |
| 23968 | module_preprocess_options (cpp_reader *reader) |
| 23969 | { |
| 23970 | gcc_checking_assert (!lang_hooks.preprocess_undef); |
| 23971 | if (modules_p ()) |
| 23972 | { |
| 23973 | auto *cb = cpp_get_callbacks (reader); |
| 23974 | |
| 23975 | cb->translate_include = maybe_translate_include; |
| 23976 | cb->user_deferred_macro = module_state::deferred_macro; |
| 23977 | if (flag_header_unit) |
| 23978 | { |
| 23979 | /* If the preprocessor hook is already in use, that |
| 23980 | implementation will call the undef langhook. */ |
| 23981 | if (cb->undef) |
| 23982 | lang_hooks.preprocess_undef = module_state::undef_macro; |
| 23983 | else |
| 23984 | cb->undef = module_state::undef_macro; |
| 23985 | } |
| 23986 | auto *opt = cpp_get_options (reader); |
| 23987 | opt->module_directives = true; |
| 23988 | if (flag_no_output) |
| 23989 | opt->directives_only = true; |
| 23990 | if (opt->main_search == CMS_none) |
| 23991 | opt->main_search = cpp_main_search (flag_header_unit); |
| 23992 | } |
| 23993 | } |
| 23994 | |
| 23995 | #include "gt-cp-module.h" |
| 23996 | |