-rw-r--r--   gcc/ChangeLog                    6
-rw-r--r--   gcc/cfgcleanup.c                 2
-rw-r--r--   gcc/config/dfp-bit.c             2
-rw-r--r--   gcc/expr.c                       2
-rw-r--r--   gcc/fold-const.c                 2
-rw-r--r--   gcc/jump.c                       2
-rw-r--r--   gcc/mips-tfile.c                 2
-rw-r--r--   gcc/omp-low.c                    4
-rw-r--r--   gcc/sched-int.h                  2
-rw-r--r--   gcc/tree-ssa-loop-prefetch.c     2
-rw-r--r--   gcc/tree-vrp.c                   2
11 files changed, 17 insertions, 11 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 5702d7b..e50a047 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2006-04-15 Kazu Hirata <kazu@codesourcery.com>
+
+ * cfgcleanup.c, config/dfp-bit.c, expr.c, fold-const.c,
+ jump.c, mips-tfile.c, omp-low.c, sched-int.h,
+ tree-ssa-loop-prefetch.c, tree-vrp.c: Fix comment typos.
+
2006-04-15 John David Anglin <dave.anglin@nrc-cnrc.gc.ca>
* pa.md (output_bb patterns): Shorten long branch alternatives by 4.
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index 954a956..08bd0b6 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -1667,7 +1667,7 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
&& (newpos1 != BB_HEAD (src1)))
return false;
- /* Avoid deleting preserve label when redirecting ABNORMAL edeges. */
+ /* Avoid deleting preserve label when redirecting ABNORMAL edges. */
if (block_has_preserve_label (e1->dest)
&& (e1->flags & EDGE_ABNORMAL))
return false;
diff --git a/gcc/config/dfp-bit.c b/gcc/config/dfp-bit.c
index 271bfb5..c9374c5 100644
--- a/gcc/config/dfp-bit.c
+++ b/gcc/config/dfp-bit.c
@@ -411,7 +411,7 @@ DFP_TO_INT (DFP_C_TYPE x)
TO_INTERNAL (&s, &n1);
/* Rescale if the exponent is less than zero. */
decNumberToIntegralValue (&n2, &n1, &context);
- /* Get a value to use for the quanitize call. */
+ /* Get a value to use for the quantize call. */
decNumberFromString (&qval, (char *) "1.0", &context);
/* Force the exponent to zero. */
decNumberQuantize (&n1, &n2, &qval, &context);
diff --git a/gcc/expr.c b/gcc/expr.c
index 48e23a9..8d2839f 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -7717,7 +7717,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
else if (!MEM_P (op0))
{
/* If the operand is not a MEM, force it into memory. Since we
- are going to be be changing the mode of the MEM, don't call
+ are going to be changing the mode of the MEM, don't call
force_const_mem for constants because we don't allow pool
constants to change mode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 3a8327b..ddc56f6 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -7097,7 +7097,7 @@ native_interpret_vector (tree type, unsigned char *ptr, int len)
}
-/* Subroutine of fold_view_convert_expr. Interpet the contents of
+/* Subroutine of fold_view_convert_expr. Interpret the contents of
the buffer PTR of length LEN as a constant of type TYPE. For
INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
we return a REAL_CST, etc... If the buffer cannot be interpreted,
diff --git a/gcc/jump.c b/gcc/jump.c
index b60c284..b087394 100644
--- a/gcc/jump.c
+++ b/gcc/jump.c
@@ -21,7 +21,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
/* This is the pathetic reminder of old fame of the jump-optimization pass
- of the compiler. Now it contains basically set of utility function to
+ of the compiler. Now it contains basically a set of utility functions to
operate with jumps.
Each CODE_LABEL has a count of the times it is used
diff --git a/gcc/mips-tfile.c b/gcc/mips-tfile.c
index 3d998f4..7530ac7 100644
--- a/gcc/mips-tfile.c
+++ b/gcc/mips-tfile.c
@@ -4363,7 +4363,7 @@ copy_object (void)
/* Read in each of the sections if they exist in the object file.
- We read things in in the order the mips assembler creates the
+ We read things in the order the mips assembler creates the
sections, so in theory no extra seeks are done.
For simplicity sake, round each read up to a page boundary,
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index 5578e47..bcc3650 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -2233,10 +2233,10 @@ remove_exit_barrier (struct omp_region *region)
exit_bb = region->exit;
/* The last insn in the block will be the parallel's OMP_RETURN. The
- workshare's OMP_RETURN will be in a preceeding block. The kinds of
+ workshare's OMP_RETURN will be in a preceding block. The kinds of
statements that can appear in between are extremely limited -- no
memory operations at all. Here, we allow nothing at all, so the
- only thing we allow to preceed this OMP_RETURN is a label. */
+ only thing we allow to precede this OMP_RETURN is a label. */
si = bsi_last (exit_bb);
gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
bsi_prev (&si);
diff --git a/gcc/sched-int.h b/gcc/sched-int.h
index 5b042a0..bee8a98 100644
--- a/gcc/sched-int.h
+++ b/gcc/sched-int.h
@@ -437,7 +437,7 @@ enum SPEC_TYPES_OFFSETS {
/* This dependence is to the instruction in the recovery block, that was
formed to recover after control-speculation failure.
- Thus, this dependence can be be overcome with generating of the copy of
+ Thus, this dependence can be overcome with generating of the copy of
this instruction in the recovery block. */
#define BE_IN_CONTROL (((ds_t) DEP_WEAK_MASK) << BE_IN_CONTROL_BITS_OFFSET)
diff --git a/gcc/tree-ssa-loop-prefetch.c b/gcc/tree-ssa-loop-prefetch.c
index 4b63d07..b58dbf6 100644
--- a/gcc/tree-ssa-loop-prefetch.c
+++ b/gcc/tree-ssa-loop-prefetch.c
@@ -204,7 +204,7 @@ struct mem_ref
struct mem_ref *next; /* The next reference in the group. */
};
-/* Dumps information obout reference REF to FILE. */
+/* Dumps information about reference REF to FILE. */
static void
dump_mem_ref (FILE *file, struct mem_ref *ref)
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index fa41959..872a466 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -1703,7 +1703,7 @@ extract_range_from_unary_expr (value_range_t *vr, tree expr)
new_max = fold_convert (outer_type, orig_max);
/* Verify the new min/max values are gimple values and
- that they compare equal to the orignal input's
+ that they compare equal to the original input's
min/max values. */
if (is_gimple_val (new_min)
&& is_gimple_val (new_max)