/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
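
/* For illustration, a generated routine in insn-emit.c for a typical
   named pattern looks roughly like the hand-written sketch below (real
   generated code also handles operand predicates and FAIL/DONE); the
   gen_rtx_* constructors it calls come from genrtl.[ch]:

     rtx
     gen_addsi3 (rtx operand0, rtx operand1, rtx operand2)
     {
       return gen_rtx_SET (VOIDmode, operand0,
                           gen_rtx_PLUS (SImode, operand1, operand2));
     }  */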

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "fixed-value.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top-level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (as on most machines),
   these two rtxs are identical.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define first_insn (crtl->emit.x_first_insn)
#define last_insn (crtl->emit.x_last_insn)
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
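
/* A port without a blockage insn pattern typically emits this barrier
   with emit_insn (gen_blockage ()); the volatile ASM_INPUT keeps the
   scheduler from moving insns across it.  */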


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
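
/* Since CONST_INTs in this range are shared, pointer equality suffices
   to compare them; e.g. GEN_INT (0) == const0_rtx always holds, as
   const0_rtx is just const_int_rtx[MAX_SAVED_CONST_INT].  */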

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
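
/* For example, gen_int_mode (0xff, QImode) yields (const_int -1):
   trunc_int_for_mode sign-extends from the 8-bit mode, producing the
   canonical shared CONST_INT for that value.  */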

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	of copies of the sign bit, and the signs of i0 and i1 agree); then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
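
/* Worked examples, assuming a 64-bit HOST_WIDE_INT:
   immed_double_const (5, 0, SImode) is case 1 and returns the CONST_INT
   5; immed_double_const (-1, -1, TImode) is case 2, since i1 is all
   copies of i0's sign bit, and returns (const_int -1);
   immed_double_const (0, 1, TImode) is case 3 and yields a VOIDmode
   CONST_DOUBLE through lookup_const_double.  */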

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
    of the fixed stack frame.  For example, something which is pushed
    by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves?  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
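
/* For instance, (subreg:DI (reg:SI) 0) is accepted as a paradoxical
   subreg (osize > isize, offset 0), while on a target with 4-byte words
   (subreg:HI (reg:DF) 0) is rejected, because subregs of floating-point
   modes may not change size.  */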

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
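
/* E.g. gen_lowpart_SUBREG (SImode, (reg:DI 100)) produces
   (subreg:SI (reg:DI 100) 0) on a little-endian target and
   (subreg:SI (reg:DI 100) 4) on a typical big-endian one, since the
   least-significant word lives at different byte offsets.  */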


/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
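
/* Example: byte_lowpart_offset (SImode, DImode) is 0 on little-endian
   targets and 4 on typical big-endian ones; the paradoxical direction,
   byte_lowpart_offset (DImode, SImode), is correspondingly 0 or -4.  */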

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimate, because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

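  /* The tables grow by doubling, so the amortized cost of allocating
     a new pseudo stays constant.  */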
  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
					REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use the needed values from the memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and record its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
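
/* E.g. gen_lowpart_common (SImode, (zero_extend:DI (reg:SI X)))
   returns (reg:SI X) directly, and asking for HImode instead recurses
   to take the HImode lowpart of X itself.  */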

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but take the mode of EXP as INNERMODE, in case EXP
   is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
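
/* With 4-byte words, for instance, the SImode lowpart of a DImode
   value is at byte 0 on a little-endian target and at byte 4 when
   WORDS_BIG_ENDIAN; an HImode lowpart of an SImode value further gets
   the BYTES_BIG_ENDIAN adjustment, byte 2 instead of byte 0.  */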

/* Return the offset in bytes needed to get the OUTERMODE high part
   of a value of mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
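
/* E.g. subreg_highpart_offset (SImode, DImode) on a 32-bit target is
   4 on a little-endian machine and 0 on a big-endian one -- the
   mirror image of subreg_lowpart_offset above.  */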

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Most uses of this function can now be replaced by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return a constant zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
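
/* E.g. on a 32-bit target, operand_subword (op, 1, 1, DImode) is the
   word of OP at byte offset 4, which holds the high-order half when
   !WORDS_BIG_ENDIAN.  */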

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	 || !CONST_INT_P (MEM_OFFSET (mem))
	 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
	     < align))
       return -1;
     else
       return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
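
/* E.g. if MEM is known to be at DECL plus 6 bytes and DECL is at
   least 32-bit aligned, get_mem_align_offset (mem, 32) returns 2:
   the address minus 2 is 32-bit aligned.  */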

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL; that
     NULL can reach us here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
	if (integer_zerop (TREE_OPERAND (t, 1)))
	  /* We don't know anything about the alignment.  */
	  align = BITS_PER_UNIT;
	else
	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
	{
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	{
	  tree base_type = TREE_TYPE (base);
	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
		      || DECL_ARTIFICIAL (base));
	  MEM_READONLY_P (ref) = 1;
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
	            align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }

	  /* If this is an indirect reference, record it.  */
	  else if (TREE_CODE (t) == INDIRECT_REF
		   || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
	    {
	      expr = t;
	      offset = const0_rtx;
	      apply_bitpos = bitpos;
	    }
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == INDIRECT_REF
	       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align
	    = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
    {
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
	 we're overlapping.  */
      offset = NULL;
      expr = NULL;
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
		     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem),
				   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
		     MEM_SIZE (mem), MEM_ALIGN (mem),
		     MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
				   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
		     MEM_ADDR_SPACE (memref), mmode);

  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS and the
   caller is responsible for adjusting the base register of MEMREF.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  int pbits;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  pbits = GET_MODE_BITSIZE (address_mode);
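  /* The shift pair below sign-extends the low PBITS bits of OFFSET;
     e.g. a 64-bit HOST_WIDE_INT with a 32-bit address space keeps
     only the low 32 bits of OFFSET, sign-extended.  */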
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if
     OFFSET is zero.  */
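  /* E.g. an offset of 2 from a 64-bit-aligned MEM leaves at most
     16 bits of known alignment: (2 & -2) * BITS_PER_UNIT == 16.  */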
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new_rtx) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				       memoffset, size, memalign, as,
				       GET_MODE (new_rtx));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
		     as, GET_MODE (new_rtx));
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new_rtx);
  rtx memoffset = MEM_OFFSET (new_rtx);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (expr, 1);
	  tree offset = component_ref_field_offset (expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && INTVAL (memoffset) >= 0)
	    break;

	  if (! host_integerp (offset, 1))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  expr = TREE_OPERAND (expr, 0);
	  memoffset
	    = (GEN_INT (INTVAL (memoffset)
			+ tree_low_cst (offset, 1)
			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			   / BITS_PER_UNIT)));
	}
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
	       && DECL_SIZE_UNIT (expr)
	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
	       && (! memoffset || INTVAL (memoffset) >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  expr = NULL_TREE;
	  break;
	}
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
				       MEM_ALIGN (new_rtx),
				       MEM_ADDR_SPACE (new_rtx), mode);

  return new_rtx;
}

/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  TREE_THIS_NOTRAP (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
				  NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  alias_set_type alias;
  rtx addr, offset;
  tree expr;

  expr = get_spill_slot_decl (true);
  alias = MEM_ALIAS_SET (DECL_RTL (expr));

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  offset = const0_rtx;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    offset = XEXP (addr, 1);

  MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
				   MEM_SIZE (mem), MEM_ALIGN (mem),
				   ADDR_SPACE_GENERIC, GET_MODE (mem));
  MEM_NOTRAP_P (mem) = 1;
}

/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}

/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};


/* Check that ORIG is not marked when it should not be and mark ORIG
   as in use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a
	 distinct value.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside a single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}

/* Go through all the RTL insn bodies and check that there is no
   unexpected sharing between the subexpressions.  */

void
verify_rtl_sharing (void)
{
  rtx p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (GET_CODE (PATTERN (p)) == SEQUENCE)
	  {
	    int i;
	    rtx q, sequence = PATTERN (p);

	    for (i = 0; i < XVECLEN (sequence, 0); i++)
	      {
		q = XVECEXP (sequence, 0, i);
		gcc_assert (INSN_P (q));
		reset_used_flags (PATTERN (q));
		reset_used_flags (REG_NOTES (q));
	      }
	  }
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	verify_rtx_sharing (PATTERN (p), p);
	verify_rtx_sharing (REG_NOTES (p), p);
      }
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned in a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a
	 distinct value.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

              /* Copy the vector iff we copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
                {
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
          if (i == length-1)
            {
              x = XEXP (x, i);
	      goto repeat;
            }
	  reset_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    reset_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 1;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  set_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    set_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}

/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
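
/* E.g. if OTHER is a MEM and X is a different MEM, or if OTHER is a
   register that is either a hard register or mentioned in X, then X
   is first copied into a fresh pseudo so that assigning to OTHER
   cannot clobber X's value.  */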

/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the first insn of the current sequence or current function.  */

rtx
get_insns (void)
{
  return first_insn;
}

/* Specify a new insn as the first in the chain.  */

void
set_first_insn (rtx insn)
{
  gcc_assert (!PREV_INSN (insn));
  first_insn = insn;
}

/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn (void)
{
  return last_insn;
}

/* Specify a new insn as the last in the chain.  */

void
set_last_insn (rtx insn)
{
  gcc_assert (!NEXT_INSN (insn));
  last_insn = insn;
}

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (last_insn)
    return last_insn;
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = first_insn;

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0, 0);
	}
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0,
			    XVECLEN (PATTERN (insn), 0) - 1);
	}
    }

  return insn;
}

/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  return cur_insn_uid;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}


/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}
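
/* For example, given the schematic chain

	i1 -> S -> i2,   where PATTERN (S) == (sequence [s1 s2 s3]),

   next_insn (i1) yields s1 rather than S itself, and previous_insn (i2)
   yields s3.  */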

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL_RTX;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL_RTX;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx
next_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next INSN, DEBUG_INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous INSN, DEBUG_INSN, CALL_INSN or JUMP_INSN before
   INSN; or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}

/* Return nonzero if INSN really does something: it is a CALL_INSN, a
   JUMP_INSN, or a NONJUMP_INSN that, after reload, is not a standalone
   USE or CLOBBER.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
	break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
	break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
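
/* For instance, in the chain  L1: L2: L3: insn,
   skip_consecutive_labels (L1) returns L3, the last label marking the
   same spot.  */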

#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so the setter/user pair can still be found.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* for_each_rtx callback: find an RTX_AUTOINC class rtx whose operand
   matches DATA.  Returns 1 when found, -1 to skip the sub-rtxes of a
   non-matching autoinc, and 0 to keep searching.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
      case PRE_DEC:
      case PRE_INC:
      case POST_DEC:
      case POST_INC:
      case PRE_MODIFY:
      case POST_MODIFY:
	if (rtx_equal_p (reg, XEXP (x, 0)))
	  return 1;
	break;

      default:
	gcc_unreachable ();
    }
  return -1;
}
#endif

/* Increment LABEL_NUSES for each label referenced inside X.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}


/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern to try to split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB note only if exactly
		 one jump is created; otherwise the machine description
		 is responsible for this step, using the
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

	    /* Update the debug information for the CALL_INSN.  */
	    if (flag_enable_icf_debug)
	      (*debug_hooks->copy_call_info) (trial, insn);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

#ifdef AUTO_INC_DEC
	case REG_INC:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;
#endif

	default:
	  break;
	}
    }

  /* If there are LABELs inside the split insns, increment the
     usage count so we don't delete the labels.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}
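
/* A minimal usage sketch (hypothetical driver loop, in the spirit of the
   insn-splitting passes):

	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    insn = try_split (PATTERN (insn), insn, 1);

   Passing LAST == 1 makes the scan resume after the whole replacement
   sequence instead of revisiting it.  */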

/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (after == stack->last)
	  {
	    stack->last = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen, as the first insn in a BB is always
	 either a NOTE or a LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
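
/* Schematic (delay-slot SEQUENCEs): if AFTER's pattern is
   (sequence [s1 s2]), then after add_insn_after (I, AFTER, ...) the
   outer chain reads

	... -> AFTER -> I -> ...

   and NEXT_INSN (s2) is redirected to I as well, keeping the inner
   chain consistent with the outer one.  Symmetrically, when the insn
   following I holds a SEQUENCE, the PREV_INSN of its first element is
   pointed at I.  */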

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from before.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen, as the first insn in a BB is always either a
	 NOTE or a LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}


/* Replace INSN with a deleted-instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
	df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
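
/* Schematically, reorder_insns_nobb (FROM, TO, AFTER) turns

	p -> FROM ... TO -> n   ...   AFTER -> q

   into

	p -> n   ...   AFTER -> FROM ... TO -> q.  */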

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}


/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that is
   now a relic of the past, because SEQUENCE rtl badly fragments RTL
   memory: the generated SEQUENCE wrapper would almost certainly die
   right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, NULL);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, NULL);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, NULL);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}

/* Helper for the emit_insn_after family; handles lists of
   instructions efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_insn_after_noloc (pattern, after, NULL);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_debug_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_debug_insn_after_noloc (pattern, after);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_debug_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_debug_insn_before_noloc (pattern, before);
}

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make a NOTE of kind KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, its datum is replaced instead.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
	 has multiple sets (some callers assume single_set
	 means the insn only has one set, when in fact it
	 means the insn only has one *useful* set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
	  gcc_assert (!note);
	  return NULL_RTX;
	}

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      if (note)
	{
	  XEXP (note, 0) = datum;
	  df_notes_rescan (insn);
	  return note;
	}
      break;

    default:
      if (note)
	{
	  XEXP (note, 0) = datum;
	  return note;
	}
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
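
/* Typical use (hypothetical insn and value):

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   replaces the datum of any existing REG_EQUAL note on INSN instead of
   accumulating duplicates.  */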

/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);