path: root/lldb/source/Utility/StringExtractor.cpp
author    Greg Clayton <gclayton@apple.com>    2013-01-25 18:06:21 +0000
committer Greg Clayton <gclayton@apple.com>    2013-01-25 18:06:21 +0000
commit    c7bece56faa5eef1c3d141d0c0b0b68b28a9aed2 (patch)
tree      9a0132fc3b0bb4f38d06a0f352ee75ac57994771 /lldb/source/Utility/StringExtractor.cpp
parent    d0ed6c249dbd6bd488b6491b536a387548c00f7e (diff)
<rdar://problem/13069948>
Major fixes to allow reading files that are over 4GB. The main problem was that the DataExtractor was using 32 bit offsets as a data cursor, and since we mmap all of our object files we could run into the 4GB boundary with a very large core file. So I defined a new "lldb::offset_t" which should be used for all file offsets. After making this change, I temporarily enabled warnings for data loss and for unexpected implicit conversions and found a ton of things that I fixed. Any functions that take an index internally should use "size_t" for any indexes and also should return "size_t" for any sizes of collections.

llvm-svn: 173463
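
A minimal sketch of the pattern this commit describes, for illustration only: the "lldb::offset_t" typedef is named in the commit message, but the cursor class, its methods, and the sentinel constant below are simplified assumptions, not the actual LLDB sources.

// Sketch only: a 64-bit data cursor so extraction works past the 4GB mark.
#include <cstdint>
#include <string>
#include <utility>

namespace lldb {
// File offsets and data cursors are 64-bit so files over 4GB can be read.
typedef uint64_t offset_t;
}

// Hypothetical "invalid offset" sentinel for this sketch.
static const lldb::offset_t kInvalidOffsetSketch = UINT64_MAX;

class DataCursorSketch {
public:
  explicit DataCursorSketch(std::string data) : m_data(std::move(data)) {}

  // Collection sizes and indexes use size_t, per the commit message.
  size_t GetByteSize() const { return m_data.size(); }

  // Reads one byte at *offset_ptr and advances the cursor, or returns
  // fail_value and leaves the cursor alone when the offset is past the end.
  uint8_t GetU8(lldb::offset_t *offset_ptr, uint8_t fail_value = 0) const {
    if (*offset_ptr < m_data.size())
      return static_cast<uint8_t>(m_data[static_cast<size_t>((*offset_ptr)++)]);
    return fail_value;
  }

private:
  std::string m_data;
};
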
Diffstat (limited to 'lldb/source/Utility/StringExtractor.cpp')
-rw-r--r--    lldb/source/Utility/StringExtractor.cpp    20
1 file changed, 10 insertions, 10 deletions
diff --git a/lldb/source/Utility/StringExtractor.cpp b/lldb/source/Utility/StringExtractor.cpp
index bbe01d2..2f4bcec 100644
--- a/lldb/source/Utility/StringExtractor.cpp
+++ b/lldb/source/Utility/StringExtractor.cpp
@@ -134,7 +134,7 @@ StringExtractor::GetChar (char fail_value)
++m_index;
return ch;
}
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -157,7 +157,7 @@ StringExtractor::GetHexU8 (uint8_t fail_value, bool set_eof_on_fail)
}
}
if (set_eof_on_fail || m_index >= m_packet.size())
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -195,7 +195,7 @@ StringExtractor::GetHexMaxU32 (bool little_endian, uint32_t fail_value)
// Make sure we don't exceed the size of a uint32_t...
if (nibble_count >= (sizeof(uint32_t) * 2))
{
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -227,7 +227,7 @@ StringExtractor::GetHexMaxU32 (bool little_endian, uint32_t fail_value)
// Make sure we don't exceed the size of a uint32_t...
if (nibble_count >= (sizeof(uint32_t) * 2))
{
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -257,7 +257,7 @@ StringExtractor::GetHexMaxU64 (bool little_endian, uint64_t fail_value)
// Make sure we don't exceed the size of a uint64_t...
if (nibble_count >= (sizeof(uint64_t) * 2))
{
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -289,7 +289,7 @@ StringExtractor::GetHexMaxU64 (bool little_endian, uint64_t fail_value)
// Make sure we don't exceed the size of a uint64_t...
if (nibble_count >= (sizeof(uint64_t) * 2))
{
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -341,7 +341,7 @@ StringExtractor::GetHexWithFixedSize (uint32_t byte_size, bool little_endian, ui
// Little Endian
uint32_t shift_amount;
for (i = 0, shift_amount = 0;
- i < byte_size && m_index != UINT32_MAX;
+ i < byte_size && IsGood();
++i, shift_amount += 8)
{
result |= ((uint64_t)GetHexU8() << shift_amount);
@@ -350,14 +350,14 @@ StringExtractor::GetHexWithFixedSize (uint32_t byte_size, bool little_endian, ui
else
{
// Big Endian
- for (i = 0; i < byte_size && m_index != UINT32_MAX; ++i)
+ for (i = 0; i < byte_size && IsGood(); ++i)
{
result <<= 8;
result |= GetHexU8();
}
}
}
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return fail_value;
}
@@ -392,6 +392,6 @@ StringExtractor::GetNameColonValue (std::string &name, std::string &value)
}
}
}
- m_index = UINT32_MAX;
+ m_index = UINT64_MAX;
return false;
}
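
The hunks above also replace direct "m_index != UINT32_MAX" comparisons with IsGood(). A rough sketch of that failed-index sentinel idiom, assuming a stripped-down extractor: the member and method names mirror the diff, but the bodies are illustrative guesses, not the real StringExtractor implementation.

#include <cstdint>
#include <string>
#include <utility>

// Illustrative stand-in for the sentinel idiom in the diff above: the cursor
// is now 64-bit, so the "failed/exhausted" marker becomes UINT64_MAX.
class StringExtractorSketch {
public:
  explicit StringExtractorSketch(std::string packet)
      : m_packet(std::move(packet)), m_index(0) {}

  // True while the index has not been poisoned by a failed extraction.
  bool IsGood() const { return m_index != UINT64_MAX; }

  // Returns the next character, or fail_value after marking the extractor
  // as failed by setting the index to the 64-bit sentinel.
  char GetChar(char fail_value = '\0') {
    if (IsGood() && m_index < m_packet.size())
      return m_packet[static_cast<size_t>(m_index++)];
    m_index = UINT64_MAX; // was UINT32_MAX before the cursor grew to 64 bits
    return fail_value;
  }

private:
  std::string m_packet;
  uint64_t m_index;
};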