From 4ee9854be5218b504fd4c1bc502ba9503caf021f Mon Sep 17 00:00:00 2001
From: Zequan Wu
Date: Wed, 24 Nov 2021 15:18:45 -0800
Subject: [PATCH] Limit Tokenize max_tokens to 512 when parsing INLINE record.

This is a more practical reserved capacity than
std::numeric_limits<int>::max() for the vector.

Change-Id: Ic8d4e812c3804e4f15cc51650f7a91bae7313415
Reviewed-on: https://chromium-review.googlesource.com/c/breakpad/breakpad/+/3301419
Reviewed-by: Joshua Peraza
Reviewed-by: Lei Zhang
---
 src/processor/basic_source_line_resolver.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/processor/basic_source_line_resolver.cc b/src/processor/basic_source_line_resolver.cc
index 2e05d72a..5dc73801 100644
--- a/src/processor/basic_source_line_resolver.cc
+++ b/src/processor/basic_source_line_resolver.cc
@@ -730,7 +730,8 @@ bool SymbolParseHelper::ParseInline(
   inline_line += 7;  // skip prefix
 
   vector<char*> tokens;
-  Tokenize(inline_line, kWhitespace, std::numeric_limits<int>::max(), &tokens);
+  // Increase max_tokens if necessary.
+  Tokenize(inline_line, kWhitespace, 512, &tokens);
 
   // Determine the version of INLINE record by parity of the vector length.
   *has_call_site_file_id = tokens.size() % 2 == 0;