Fix TestProcessAPI.py to only allocate sys.maxsize buffer

I hardcoded nearly a UINT64_MAX value in this test case,
and Python is not able to convert it to a long on some
platforms. Use sys.maxsize instead; this also would have
failed if the testsuite were run on a 32-bit system.
This commit is contained in:
Jason Molenda
2023-02-07 15:25:51 -08:00
parent 8cbf041ecb
commit 4a8cc285e9

View File

@@ -3,6 +3,7 @@ Test SBProcess APIs, including ReadMemory(), WriteMemory(), and others.
"""
import lldb
import sys
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test.lldbutil import get_stopped_thread, state_type_to_str
@@ -76,15 +77,16 @@ class ProcessAPITestCase(TestBase):
# will try to malloc it and fail, we should get an error
# result.
error = lldb.SBError()
bigsize = sys.maxsize - 8;
content = process.ReadMemory(
val.AddressOf().GetValueAsUnsigned(),
0xffffffffffffffe8, error)
bigsize, error)
if error.Success():
self.assertFalse(error.Success(), "SBProcessReadMemory claims to have "
"successfully read 0xffffffffffffffe8 bytes")
"successfully read 0x%x bytes" % bigsize)
if self.TraceOn():
print("Tried to read 0xffffffffffffffe8 bytes, got error message: ",
error.GetCString())
print("Tried to read 0x%x bytes, got error message: %s" %
(bigsize, error.GetCString()))
# Read (char *)my_char_ptr.
val = frame.FindValue("my_char_ptr", lldb.eValueTypeVariableGlobal)