Limit Tokenize max_tokens to 512 when parsing INLINE records.

This is a more practical reserved capacity for the token vector than
std::numeric_limits<int>::max().
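
For context, a minimal sketch of a Tokenize-style helper, assuming (as the
message implies) that the real function reserves max_tokens entries in the
output vector before splitting; the name and body below are illustrative,
not Breakpad's actual implementation:

    #include <cstring>
    #include <vector>

    // Hypothetical stand-in for SymbolParseHelper::Tokenize, shown only to
    // illustrate the reserved capacity: the output vector reserves
    // max_tokens entries up front, so the cap directly bounds that
    // reservation.
    bool TokenizeSketch(char* line, const char* separators, int max_tokens,
                        std::vector<char*>* tokens) {
      tokens->clear();
      tokens->reserve(max_tokens);  // with INT_MAX this is a multi-gigabyte
                                    // reservation request; with 512 it is tiny
      char* token = std::strtok(line, separators);
      while (token && static_cast<int>(tokens->size()) < max_tokens) {
        tokens->push_back(token);
        token = std::strtok(nullptr, separators);
      }
      return !tokens->empty();
    }

An INLINE record is unlikely to come close to 512 fields, and the comment
added in the diff below notes that the cap can be raised if that ever changes.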

Change-Id: Ic8d4e812c3804e4f15cc51650f7a91bae7313415
Reviewed-on: https://chromium-review.googlesource.com/c/breakpad/breakpad/+/3301419
Reviewed-by: Joshua Peraza <jperaza@chromium.org>
Reviewed-by: Lei Zhang <thestig@chromium.org>
Author: Zequan Wu, 2021-11-24 15:18:45 -08:00 (committed by Joshua Peraza)
Parent: 998a0a480c
Commit: 4ee9854be5


@@ -730,7 +730,8 @@ bool SymbolParseHelper::ParseInline(
   inline_line += 7;  // skip prefix
   vector<char*> tokens;
-  Tokenize(inline_line, kWhitespace, std::numeric_limits<int>::max(), &tokens);
+  // Increase max_tokens if necessary.
+  Tokenize(inline_line, kWhitespace, 512, &tokens);
   // Determine the version of INLINE record by parity of the vector length.
   *has_call_site_file_id = tokens.size() % 2 == 0;
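
For illustration, a hedged sketch of the parity check above, assuming the two
INLINE record layouts from Breakpad's symbol-file documentation (the newer
layout inserts a call_site_file_id field); the sample token values are made up:

    #include <cstdio>
    #include <vector>

    int main() {
      // Tokens remaining after the "INLINE " prefix is skipped.
      // Older layout:  nest_level call_site_line origin_id [address size]+
      std::vector<const char*> old_format = {"0", "14", "2", "0x1000", "0x10"};
      // Newer layout:  nest_level call_site_line call_site_file_id origin_id
      //                [address size]+
      std::vector<const char*> new_format = {"0", "14", "1", "2",
                                             "0x1000", "0x10"};

      // Each extra (address, size) range adds two tokens, so it never changes
      // parity; only the presence of call_site_file_id does.
      bool old_has_id = old_format.size() % 2 == 0;  // false: odd count
      bool new_has_id = new_format.size() % 2 == 0;  // true:  even count
      std::printf("old has_call_site_file_id=%d, new has_call_site_file_id=%d\n",
                  old_has_id, new_has_id);
      return 0;
    }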