Enable more lint checks (#2048)

* pyupgrade py36-plus

* Enable ruff C4 checks and fix issues

* Enable ruff ISC checks and fix issues

* Enable ruff SLOT checks and fix issues

* Enable ruff FLY checks and fix issues

* Enable some RET ruff checks and fix errors

* Enable PGH ruff checks and fix errors

* Fix some more lint issues
Gulshan Singh 2 years ago committed by GitHub
parent 3e855ad220
commit 3d57de2eda
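For readers who don't know these ruff rule sets by code, the short sketch below (standalone, illustrative Python; none of the names come from pwndbg) shows the before/after pattern each enabled check enforces, mirroring the hunks that follow.

from collections import defaultdict


def comprehension_fixes(values):
    # C4 (flake8-comprehensions): prefer literals over dict()/set()/list() wrappers.
    lookup = {v: str(v) for v in values}  # was: dict([(v, str(v)) for v in values])
    unique = set(values)                  # was: set([v for v in values])
    return lookup, unique


def string_fixes(name, count):
    # ISC (flake8-implicit-str-concat): drop the redundant "+" between adjacent literals.
    msg = "first part " "second part"     # was: "first part " + "second part"
    # FLY (flynt): prefer f-strings over join()/%-formatting of static pieces.
    label = f"{name} {count}"             # was: " ".join((name, str(count)))
    return msg, label


def return_fixes(value):
    # RET506/507/508: no "else" after a branch that raises, continues, or breaks.
    if value < 0:
        raise ValueError("negative")
    return value * 2                      # was nested under a superfluous "else:"


class Label(str):
    # SLOT (flake8-slots): subclasses of str/tuple/namedtuple should define __slots__.
    __slots__ = ()


# PGH (pygrep-hooks): a bare "# type: ignore" must name a specific error code,
# e.g. "# type: ignore[index]", instead of silencing every possible error.
table = defaultdict(list)                 # was: defaultdict(lambda: [])

print(comprehension_fixes([1, 2]), string_fixes("regs", 3), return_fixes(4), Label("ok"), dict(table))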

@ -408,8 +408,8 @@ def _try2run_heap_command(function: Callable[..., str | None], a: Any, kw: Any)
)
if pwndbg.gdblib.config.exception_verbose or pwndbg.gdblib.config.exception_debugger:
raise err
else:
pwndbg.exception.inform_verbose_and_debug()
pwndbg.exception.inform_verbose_and_debug()
except Exception as err:
e(f"{function.__name__}: An unknown error occurred when running this command.")
if isinstance(pwndbg.heap.current, HeuristicHeap):
@ -420,8 +420,8 @@ def _try2run_heap_command(function: Callable[..., str | None], a: Any, kw: Any)
w("You can try `set resolve-heap-via-heuristic force` and re-run this command.\n")
if pwndbg.gdblib.config.exception_verbose or pwndbg.gdblib.config.exception_debugger:
raise err
else:
pwndbg.exception.inform_verbose_and_debug()
pwndbg.exception.inform_verbose_and_debug()
return None
@ -532,7 +532,7 @@ class _ArgparsedCommand(Command):
def split_args(self, argument: str):
argv = gdb.string_to_argv(argument)
return tuple(), vars(self.parser.parse_args(argv))
return (), vars(self.parser.parse_args(argv))
class ArgparsedCommand:

@ -294,8 +294,7 @@ def query_openai_chat(prompt, model="gpt-3.5-turbo", max_tokens=100, temperature
if "error" in res:
error_message = f"{res['error']['message']}: {res['error']['type']}"
raise Exception(error_message)
else:
raise Exception(res)
raise Exception(res)
if config.ai_show_usage:
print(
M.notice(
@ -334,8 +333,7 @@ def query_openai_completions(prompt, model="text-davinci-003", max_tokens=100, t
if "error" in res:
error_message = f"{res['error']['message']}: {res['error']['type']}"
raise Exception(error_message)
else:
raise Exception(res)
raise Exception(res)
reply = res["choices"][0]["text"]
if config.ai_show_usage:
print(

@ -67,7 +67,7 @@ def asm(shellcode, format, arch, avoid, infile) -> None:
assembly = pwnlib.asm.asm(" ".join(shellcode), arch=arch, bits=bits_for_arch)
if avoid:
avoid = map(lambda byte: str(byte), avoid)
avoid = (str(byte) for byte in avoid)
avoid = pwnlib.unhex("".join(avoid))
print(message.warn("Going to avoid these bytes in hex: " + avoid.hex(" ")))
assembly = pwnlib.encode(assembly, avoid)

@ -27,7 +27,7 @@ def print_row(
name = ljust_colored(name, ljust_optname + empty_space)
defval = extend_value_with_default(value, default)
defval = ljust_colored(defval, ljust_value + empty_space)
result = " ".join((name, defval, set_show_doc))
result = f"{name} {defval} {set_show_doc}"
print(result)
return result

@ -244,12 +244,12 @@ def contextoutput(section, path, clearing, banner="both", width=None):
if width is not None:
width = int(width)
outputs[section] = path
output_settings[section] = dict(
clearing=clearing,
width=width,
banner_top=banner in ["both", "top"],
banner_bottom=banner in ["both", "bottom"],
)
output_settings[section] = {
"clearing": clearing,
"width": width,
"banner_top": banner in ["both", "top"],
"banner_bottom": banner in ["both", "bottom"],
}
# Watches

@ -39,7 +39,7 @@ def read_chunk(addr):
val = pwndbg.gdblib.typeinfo.read_gdbvalue("struct malloc_chunk", addr)
else:
val = pwndbg.heap.current.malloc_chunk(addr)
return dict({renames.get(key, key): int(val[key]) for key in val.type.keys()})
return {renames.get(key, key): int(val[key]) for key in val.type.keys()}
def format_bin(bins: Bins, verbose=False, offset=None):
@ -719,8 +719,10 @@ parser.add_argument(
"-a",
action="store_true",
default=False,
help="Whether the fake chunk must be aligned to MALLOC_ALIGNMENT. This is required for tcache "
+ "chunks and for all chunks when Safe Linking is enabled",
help=(
"Whether the fake chunk must be aligned to MALLOC_ALIGNMENT. This is required for tcache "
"chunks and for all chunks when Safe Linking is enabled"
),
)
parser.add_argument(
"--glibc-fastbin-bug",

@ -14,7 +14,8 @@ import pwndbg.gdblib.symbol
from pwndbg.commands import CommandCategory
from pwndbg.gdblib.scheduler import parse_and_eval_with_scheduler_lock
errno.errorcode[0] = "OK" # type: ignore # manually add error code 0 for "OK"
# Manually add error code 0 for "OK"
errno.errorcode[0] = "OK" # type: ignore[index]
parser = argparse.ArgumentParser(
description="Converts errno (or argument) to its string representation."
@ -109,7 +110,7 @@ def pwndbg_(filter_pattern, shell, all_, category_, list_categories) -> None:
from tabulate import tabulate
table_data = defaultdict(lambda: [])
table_data = defaultdict(list)
for name, aliases, category, docs in list_and_filter_commands(
filter_pattern, pwndbg_cmds, shell_cmds
):
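The defaultdict(lambda: []) → defaultdict(list) change in this hunk (repeated later in the telescope and Config hunks) is behavior-preserving: the default factory is any zero-argument callable, so passing list directly just avoids a pointless lambda. A quick standalone check, independent of pwndbg:

from collections import defaultdict

with_lambda = defaultdict(lambda: [])
with_builtin = defaultdict(list)

with_lambda["key"].append(1)
with_builtin["key"].append(1)

# Both factories produce a fresh empty list on missing-key access.
assert with_lambda == with_builtin == {"key": [1]}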

@ -30,11 +30,11 @@ parser.add_argument(
)
parser.add_argument(
"length",
help="Count of bytes to call mprotect on. Needs " "to be multiple of page size.",
help="Count of bytes to call mprotect on. Needs to be multiple of page size.",
type=int,
)
parser.add_argument(
"prot", help="Prot string as in mprotect(2). Eg. " '"PROT_READ|PROT_EXEC"', type=str
"prot", help='Prot string as in mprotect(2). Eg. "PROT_READ|PROT_EXEC"', type=str
)
SYS_MPROTECT = 0x7D

@ -21,9 +21,9 @@ from pwndbg.lib.regs import i386
# Offsets and names are from Linux kernel source. For example x86_64 is defined in CONFIG_X86_64 struct rt_sigframe (Linux Kernel /arch/x86/include/asm/sigframe.h)
SIGRETURN_FRAME_LAYOUTS: dict[str, list[Tuple[int, str]]] = {
"x86-64": sorted([(-8, "&pretcode")] + list(pwnlib.rop.srop.registers["amd64"].items())),
"i386": sorted(list(pwnlib.rop.srop.registers["i386"].items())),
"aarch64": sorted(list(pwnlib.rop.srop.registers["aarch64"].items())),
"arm": sorted(list(pwnlib.rop.srop.registers["arm"].items())),
"i386": sorted(pwnlib.rop.srop.registers["i386"].items()),
"aarch64": sorted(pwnlib.rop.srop.registers["aarch64"].items()),
"arm": sorted(pwnlib.rop.srop.registers["arm"].items()),
}
# Always print these registers (as well as flag register, eflags / cpsr)
@ -31,7 +31,7 @@ SIGRETURN_CORE_REGISTER: dict[str, set[str]] = {
"x86-64": {*amd64.gpr, amd64.frame, amd64.stack, amd64.pc},
"i386": {*i386.gpr, i386.frame, i386.stack, i386.pc},
"aarch64": {*aarch64.gpr, "sp", "pc"},
"arm": {*arm.gpr, "fp" "ip", "sp", "lr", "pc"},
"arm": {*arm.gpr, "fp", "ip", "sp", "lr", "pc"},
}
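The arm entry above is a case where the ISC check caught a real bug rather than a style issue: the missing comma made the adjacent literals "fp" "ip" concatenate into a single register name. A standalone illustration:

# Adjacent string literals merge silently, so the old set had only four members:
buggy = {"fp" "ip", "sp", "lr", "pc"}
fixed = {"fp", "ip", "sp", "lr", "pc"}

assert buggy == {"fpip", "sp", "lr", "pc"}
assert "ip" not in buggy and "ip" in fixed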

@ -154,7 +154,7 @@ def telescope(
count = max(math.ceil(count / ptrsize), 1)
# Map of address to register string
reg_values: DefaultDict[int, list[str]] = collections.defaultdict(lambda: [])
reg_values: DefaultDict[int, list[str]] = collections.defaultdict(list)
for reg in pwndbg.gdblib.regs.common:
reg_values[pwndbg.gdblib.regs[reg]].append(reg)

@ -101,7 +101,8 @@ def threads(num_threads, respect_config) -> None:
pwndbg.commands.context.config_max_threads_display
):
break
elif num_threads is not None and len(displayed_threads) >= num_threads:
if num_threads is not None and len(displayed_threads) >= num_threads:
break
if thread.is_valid() and thread is not original_thread:

@ -97,13 +97,13 @@ def vmmap(
total_pages = pwndbg.gdblib.vmmap.get()
# Filtered memory pages, indicated by an backtrace arrow in results
filtered_pages = list()
filtered_pages = []
# Only filter when -A and -B arguments are valid
if gdbval_or_str and lines_after >= 0 and lines_before >= 0:
# Find matching page in memory
filtered_pages = list(filter(pages_filter(gdbval_or_str), total_pages))
pages_to_display = list()
pages_to_display = []
for matched_page in filtered_pages:
# Append matched page

@ -106,14 +106,13 @@ class Parameter(gdb.Parameter):
def __get_set_string_gdb_le_9(self) -> str:
"""Handles the GDB `set <param>` command for GDB < 9"""
if (self.param.param_class == gdb.PARAM_ZUINTEGER and self.value < 0) or ( # type: ignore
self.param.param_class == gdb.PARAM_ZUINTEGER_UNLIMITED and self.value < -1 # type: ignore
if (self.param.param_class == gdb.PARAM_ZUINTEGER and self.value < 0) or ( # type: ignore[operator]
self.param.param_class == gdb.PARAM_ZUINTEGER_UNLIMITED and self.value < -1 # type: ignore[operator]
):
err = "integer %d out of range" % self.value # type: ignore
# Restore the old value
self.value = self.param.value
# GDB < 9 is too buggy, it won't handle `gdb.GdbError`..., so we return a string here
return err
return "integer %d out of range" % self.value # type: ignore[str-format]
# the logic after this line is the same as GDB >= 9
return self.__get_set_string_gdb_gte_9()

@ -252,19 +252,17 @@ class LinkMapEntry:
# Normally, only one entry for each tag is allowed to be present in the dynamic
# array for us to consider the dynamic array to be well-formed. Tags in this
# set are allowed to appear multiple times.
DYNAMIC_SECTION_ALLOW_MULTIPLE = set([elf.DT_NEEDED])
DYNAMIC_SECTION_ALLOW_MULTIPLE = {elf.DT_NEEDED}
# The DynamicSegment class expects some tags to always be present to function
# correctly. In this set we list them explicitly. Code in that class is allowed
# to presume these tags are always present after __init__.
DYNAMIC_SECTION_REQUIRED_TAGS = set(
[
elf.DT_STRTAB,
elf.DT_STRSZ,
elf.DT_SYMTAB,
elf.DT_SYMENT,
]
)
DYNAMIC_SECTION_REQUIRED_TAGS = {
elf.DT_STRTAB,
elf.DT_STRSZ,
elf.DT_SYMTAB,
elf.DT_SYMENT,
}
class DynamicSegment:

@ -48,6 +48,8 @@ class ELFInfo(namedtuple("ELFInfo", "header sections segments")):
ELF metadata and structures.
"""
__slots__ = ()
@property
def is_pic(self) -> bool:
return self.header["e_type"] == "ET_DYN"
@ -388,7 +390,7 @@ def map(pointer: int, objfile: str = "") -> Tuple[pwndbg.lib.memory.Page, ...]:
def map_inner(ei_class, ehdr, objfile: str) -> Tuple[pwndbg.lib.memory.Page, ...]:
if not ehdr:
return tuple()
return ()
base = int(ehdr.address)

@ -93,9 +93,7 @@ def get_file(path: str, try_local_path: bool = False) -> str:
real_error.append(line)
if len(real_error):
error = "\n".join(real_error)
raise OSError(
"Could not download remote file %r:\n" "Error: %s" % (path, error)
)
raise OSError("Could not download remote file %r:\nError: %s" % (path, error))
else:
print(
message.warn(

@ -78,30 +78,30 @@ class RelocTypes:
# Set of all type codes associated with jump slots, by architecture.
JUMP_SLOTS = {
"x86-64": set([RelocTypes.R_X86_64_JUMP_SLOT]),
"i386": set([RelocTypes.R_386_JMP_SLOT]),
"aarch64": set([RelocTypes.R_AARCH64_JUMP_SLOT]),
"mips": set([RelocTypes.R_MIPS_JUMP_SLOT]),
"powerpc": set([RelocTypes.R_PPC_JMP_SLOT]),
"sparc": set([RelocTypes.R_SPARC_JMP_SLOT]),
"arm": set([RelocTypes.R_ARM_JUMP_SLOT]),
"armcm": set([RelocTypes.R_ARM_JUMP_SLOT]),
"rv32": set([RelocTypes.R_RISCV_JUMP_SLOT]),
"rv64": set([RelocTypes.R_RISCV_JUMP_SLOT]),
"x86-64": {RelocTypes.R_X86_64_JUMP_SLOT},
"i386": {RelocTypes.R_386_JMP_SLOT},
"aarch64": {RelocTypes.R_AARCH64_JUMP_SLOT},
"mips": {RelocTypes.R_MIPS_JUMP_SLOT},
"powerpc": {RelocTypes.R_PPC_JMP_SLOT},
"sparc": {RelocTypes.R_SPARC_JMP_SLOT},
"arm": {RelocTypes.R_ARM_JUMP_SLOT},
"armcm": {RelocTypes.R_ARM_JUMP_SLOT},
"rv32": {RelocTypes.R_RISCV_JUMP_SLOT},
"rv64": {RelocTypes.R_RISCV_JUMP_SLOT},
}
# Set of all type codes associated with irelative jump slots, by architecture.
IRELATIVE_SLOTS = {
"x86-64": set([RelocTypes.R_X86_64_IRELATIVE]),
"i386": set([RelocTypes.R_386_IRELATIVE]),
"aarch64": set([RelocTypes.R_AARCH64_P32_IRELATIVE, RelocTypes.R_AARCH64_IRELATIVE]),
"mips": set([]),
"powerpc": set([RelocTypes.R_PPC_IRELATIVE]),
"sparc": set([RelocTypes.R_SPARC_IRELATIVE]),
"arm": set([RelocTypes.R_ARM_IRELATIVE]),
"armcm": set([RelocTypes.R_ARM_IRELATIVE]),
"rv32": set([RelocTypes.R_RISCV_IRELATIVE]),
"rv64": set([RelocTypes.R_RISCV_IRELATIVE]),
"x86-64": {RelocTypes.R_X86_64_IRELATIVE},
"i386": {RelocTypes.R_386_IRELATIVE},
"aarch64": {RelocTypes.R_AARCH64_P32_IRELATIVE, RelocTypes.R_AARCH64_IRELATIVE},
"mips": set(),
"powerpc": {RelocTypes.R_PPC_IRELATIVE},
"sparc": {RelocTypes.R_SPARC_IRELATIVE},
"arm": {RelocTypes.R_ARM_IRELATIVE},
"armcm": {RelocTypes.R_ARM_IRELATIVE},
"rv32": {RelocTypes.R_RISCV_IRELATIVE},
"rv64": {RelocTypes.R_RISCV_IRELATIVE},
}

@ -50,7 +50,7 @@ that were not made explicit.
from __future__ import annotations
import gdb
from sortedcontainers import SortedDict # type: ignore
from sortedcontainers import SortedDict # type: ignore # noqa: PGH003
import pwndbg.gdblib
from pwndbg.color import message
@ -180,8 +180,8 @@ class Tracker:
def __init__(self):
self.free_chunks = SortedDict()
self.alloc_chunks = SortedDict()
self.free_whatchpoints = dict()
self.memory_management_calls = dict()
self.free_whatchpoints = {}
self.memory_management_calls = {}
def is_performing_memory_management(self):
thread = gdb.selected_thread().global_num

@ -274,7 +274,7 @@ def run_onegadget() -> str:
cache_file = os.path.join(ONEGADGET_CACHEDIR, compute_file_hash(libc_path))
if os.path.exists(cache_file):
# Cache hit
with open(cache_file, "r") as f:
with open(cache_file) as f:
return f.read()
# Cache miss
output = subprocess.check_output(["one_gadget", "--level=100", libc_path], text=True)
@ -328,10 +328,12 @@ def check_stack_argv(expr: str) -> tuple[CheckSatResult, str]:
output_msg += f"argv[{n}] = {expr}\n"
n += 1
continue
elif expr == "NULL":
if expr == "NULL":
output_msg += f"argv[{n}] = {expr}\n"
return UNKNOWN, output_msg
elif expr == "...":
if expr == "...":
output_msg += f"argv doesn't end, please check argv[{n}..n] manually\n"
return UNKNOWN, output_msg

@ -142,7 +142,8 @@ class module(ModuleType):
):
if regset is None:
continue
elif isinstance(regset, (list, tuple)): # regs.retaddr
if isinstance(regset, (list, tuple)): # regs.retaddr
retval.extend(regset)
elif isinstance(regset, dict): # regs.flags
retval.extend(regset.keys())

@ -92,7 +92,7 @@ def get() -> Tuple[pwndbg.lib.memory.Page, ...]:
"""
# Note: debugging a coredump does still show proc.alive == True
if not pwndbg.gdblib.proc.alive:
return tuple()
return ()
if is_corefile():
return tuple(coredump_maps())
@ -283,7 +283,7 @@ def coredump_maps() -> Tuple[pwndbg.lib.memory.Page, ...]:
# ['[15]', '0x555555555000->0x555555556000', 'at', '0x00001430:', 'load2', 'ALLOC', 'LOAD', 'READONLY', 'CODE', 'HAS_CONTENTS']
try:
_idx, start_end, _at_str, _at, name, *flags_list = line.split()
start, end = map(lambda v: int(v, 16), start_end.split("->"))
start, end = (int(v, 16) for v in start_end.split("->"))
# Skip pages with start=0x0, this is unlikely this is valid vmmap
if start == 0:
@ -319,7 +319,7 @@ def coredump_maps() -> Tuple[pwndbg.lib.memory.Page, ...]:
pages.append(pwndbg.lib.memory.Page(start, end - start, flags, offset, name))
if not pages:
return tuple()
return ()
# If the last page starts on e.g. 0xffffffffff600000 it must be vsyscall
vsyscall_page = pages[-1]
@ -458,7 +458,7 @@ def proc_tid_maps() -> Tuple[pwndbg.lib.memory.Page, ...] | None:
# Process hasn't been fully created yet; it is in Z (zombie) state
if data == "":
return tuple()
return ()
pages: List[pwndbg.lib.memory.Page] = []
for line in data.splitlines():
@ -504,7 +504,8 @@ def kernel_vmmap_via_page_tables() -> Tuple[pwndbg.lib.memory.Page, ...]:
print(
M.error(
"Permission error when attempting to parse page tables with gdb-pt-dump.\n"
+ "Either change the kernel-vmmap setting, re-run GDB as root, or disable `ptrace_scope` (`echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`)"
"Either change the kernel-vmmap setting, re-run GDB as root, or disable "
"`ptrace_scope` (`echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`)"
)
)
return tuple(retpages)
@ -565,18 +566,18 @@ def kernel_vmmap_via_monitor_info_mem() -> Tuple[pwndbg.lib.memory.Page, ...]:
print(
M.error(
f"The {pwndbg.gdblib.arch.name} architecture does"
+ " not support the `monitor info mem` command. Run "
+ "`help show kernel-vmmap` for other options."
" not support the `monitor info mem` command. Run "
"`help show kernel-vmmap` for other options."
)
)
return tuple() # pylint: disable=lost-exception
return () # pylint: disable=lost-exception
lines = monitor_info_mem.splitlines()
# Handle disabled PG
# This will prevent a crash on abstract architectures
if len(lines) == 1 and lines[0] == "PG disabled":
return tuple()
return ()
pages: List[pwndbg.lib.memory.Page] = []
for line in lines:
@ -710,9 +711,7 @@ def info_files() -> Tuple[pwndbg.lib.memory.Page, ...]:
print("Bad data: %r" % line)
continue
if objfile in seen_files:
continue
else:
if objfile not in seen_files:
seen_files.add(objfile)
pages.extend(pwndbg.gdblib.elf.map(vaddr, objfile))
@ -736,7 +735,7 @@ def info_auxv(skip_exe: bool = False) -> Tuple[pwndbg.lib.memory.Page, ...]:
auxv = pwndbg.auxv.get()
if not auxv:
return tuple()
return ()
pages: List[pwndbg.lib.memory.Page] = []
exe_name = auxv.AT_EXECFN or "main.exe"

@ -1573,7 +1573,8 @@ class HeuristicHeap(GlibcMemoryAllocator):
# We only care about the relocation in .data section
if r_offset - next_field_offset < data_section_offset:
continue
elif r_offset - next_field_offset >= data_section_offset + size:
if r_offset - next_field_offset >= data_section_offset + size:
break
# To find addend:
@ -1789,7 +1790,7 @@ class HeuristicHeap(GlibcMemoryAllocator):
)
return None
raise SymbolUnresolvableError("thread_arena")
else:
else: # noqa: RET506
self._thread_arena_values[gdb.selected_thread().global_num] = self.main_arena.address
return self.main_arena

@ -134,7 +134,7 @@ class Parameter:
class Config:
def __init__(self) -> None:
self.params: Dict[str, Parameter] = {}
self.triggers: DefaultDict[str, List[Callable[..., Any]]] = defaultdict(lambda: [])
self.triggers: DefaultDict[str, List[Callable[..., Any]]] = defaultdict(list)
def add_param(
self,

@ -8,7 +8,7 @@ from typing import Optional
from typing import Tuple
from typing import Union
from pycparser import CParser # type: ignore
from pycparser import CParser # type: ignore # noqa: PGH003
from pycparser import c_ast
CAstNode = Union[

@ -54,12 +54,12 @@ class RegisterSet:
pc: str = "pc",
stack: str = "sp",
frame: str | None = None,
retaddr: Tuple[str, ...] = tuple(),
retaddr: Tuple[str, ...] = (),
flags: Dict[str, BitFlags] = {},
extra_flags: Dict[str, BitFlags] = {},
gpr: Tuple[str, ...] = tuple(),
misc: Tuple[str, ...] = tuple(),
args: Tuple[str, ...] = tuple(),
gpr: Tuple[str, ...] = (),
misc: Tuple[str, ...] = (),
args: Tuple[str, ...] = (),
retval: str | None = None,
) -> None:
self.pc = pc
@ -79,9 +79,7 @@ class RegisterSet:
if reg and reg not in self.common:
self.common.append(reg)
self.all = (
{i for i in misc} | set(flags) | set(extra_flags) | set(self.retaddr) | set(self.common)
)
self.all = set(misc) | set(flags) | set(extra_flags) | set(self.retaddr) | set(self.common)
self.all -= {None}
def __iter__(self) -> Iterator[str]:

@ -18,9 +18,7 @@ T = TypeVar("T")
class OnlyWithCommand:
def __init__(self, *commands: str | List[str]) -> None:
self.all_cmds: List[str] = list(
map(lambda cmd: cmd[0] if isinstance(cmd, list) else cmd, commands)
)
self.all_cmds: List[str] = [cmd[0] if isinstance(cmd, list) else cmd for cmd in commands]
for command in commands:
self.cmd: List[str] = command if isinstance(command, list) else [command]
self.cmd_path: str | None = which(self.cmd[0])

@ -18,10 +18,23 @@ ignore = [
]
select = [
"A",
"E",
"F",
"W",
"A", # flake8-builtins
"E", # pycodestyle
"F", # pyflakes
"W", # pycodestyle
"C4", # flake8-comprehensions
"ISC", # flake8-implicit-str-concat
"SLOT", # flake8-slots
"FLY", # flynt
"PGH", # pygrep-hooks
"RET506", # flake8-return: superfluous-else-raise
"RET507", # flake8-return: superfluous-else-continue
"RET508", # flake8-return: superfluous-else-break
# We want to enable the below lints, but they currently return too many errors
# "RET505", # flake8-return: superfluous-else-return
# "SLF" # flake8-self
# "SIM", # flake8-simplify
# "PTH", # flake8-use-pathlib
]
[tool.ruff.lint.flake8-builtins]
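PGH003, part of the pygrep-hooks group enabled above, is also why several `# type: ignore` comments elsewhere in this diff either gained a specific error code (e.g. [index], [operator]) or an explicit `# noqa: PGH003` opt-out on untyped third-party imports. A minimal sketch of the rule (not pwndbg code):

# A deliberately mistyped assignment: harmless at runtime, rejected by mypy.
# Blanket form, flagged by PGH003:
#     count: int = "three"  # type: ignore
# Specific form, accepted, because it names the one error being suppressed:
count: int = "three"  # type: ignore[assignment]

print(count)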

@ -34,7 +34,7 @@ def get_proc_maps():
with open("/proc/%d/maps" % pwndbg.gdblib.proc.pid) as f:
for line in f.read().splitlines():
addrs, perms, offset, _inode, size, objfile = line.split(maxsplit=6)
start, end = map(lambda v: int(v, 16), addrs.split("-"))
start, end = (int(v, 16) for v in addrs.split("-"))
offset = offset.lstrip("0") or "0"
size = end - start
maps.append([hex(start), hex(end), perms, hex(size)[2:], offset, objfile])

@ -44,13 +44,13 @@ def test_loads_binary_with_core_without_crashing():
assert h in output
lwp_line = re.compile(r"^\[New LWP \d+\]$")
assert any([lwp_line.match(line) for line in output])
assert any(lwp_line.match(line) for line in output)
binary_line = re.compile("^Core was generated by .+$")
assert any([binary_line.match(line) for line in output])
assert any(binary_line.match(line) for line in output)
crash_address_line = re.compile(r"^#0 0x[0-9a-fA-F]+ in main .*$")
assert any([crash_address_line.match(line) for line in output])
assert any(crash_address_line.match(line) for line in output)
def test_loads_core_without_crashing():
@ -68,13 +68,13 @@ def test_loads_core_without_crashing():
assert all(item in output for item in expected)
lwp_line = re.compile(r"^\[New LWP \d+\]$")
assert any([lwp_line.match(line) for line in output])
assert any(lwp_line.match(line) for line in output)
binary_line = re.compile("^Core was generated by .+$")
assert any([binary_line.match(line) for line in output])
assert any(binary_line.match(line) for line in output)
crash_address_line = re.compile(r"^#0 0x[0-9a-fA-F]+ in \?\? \(\)$")
assert any([crash_address_line.match(line) for line in output])
assert any(crash_address_line.match(line) for line in output)
def test_entry_no_file_loaded():

@ -195,7 +195,7 @@ def test_windbg_dX_commands(start_binary):
)
assert gdb.execute("dc data 3", to_string=True) == (
"+0000 0x400081 00 00 00 │... " " │ │\n"
"+0000 0x400081 00 00 00 │... │ │\n"
)
#################################################
@ -209,7 +209,7 @@ def test_windbg_dX_commands(start_binary):
# Check too low maxlen
assert gdb.execute("ds short_str 5", to_string=True) == (
"Max str len of 5 too low, changing to 256\n" "4000d9 'some cstring here'\n"
"Max str len of 5 too low, changing to 256\n4000d9 'some cstring here'\n"
)
# Check output for a string longer than (the default) maxlen of 256
