Merge pull request #44371 from RevoluPowered/fix-parser-crash-mesh-geometry

[fbx] fix crash in FBX parser caused by mesh geometry
This commit is contained in:
Rémi Verschelde 2020-12-15 13:04:08 +01:00 committed by GitHub
commit 6956b01cb5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 11 additions and 6 deletions

View file

@@ -128,7 +128,7 @@ Node *EditorSceneImporterFBX::import_scene(const String &p_path, uint32_t p_flag
FBXDocParser::TokenizeBinary(tokens, (const char *)data.write().ptr(), (size_t)data.size());
} else {
print_verbose("[doc] is ascii");
FBXDocParser::Tokenize(tokens, (const char *)data.write().ptr());
FBXDocParser::Tokenize(tokens, (const char *)data.write().ptr(), (size_t)data.size());
}
// The import process explained:

View file

@@ -182,7 +182,7 @@ MeshGeometry::MeshGeometry(uint64_t id, const ElementPtr element, const std::str
// This is stupid, because it means we select them ALL not just the one we want.
// but it's fine we can match by id.
GetRequiredElement(top, layer_type_name);
const ElementCollection &candidates = top->GetCollection(layer_type_name);
ElementMap::const_iterator iter;

View file

@@ -142,7 +142,7 @@ void ProcessDataToken(TokenList &output_tokens, const char *&start, const char *
} // namespace
// ------------------------------------------------------------------------------------------------
void Tokenize(TokenList &output_tokens, const char *input) {
void Tokenize(TokenList &output_tokens, const char *input, size_t length) {
// line and column numbers are one-based
unsigned int line = 1;
unsigned int column = 1;
@@ -152,8 +152,13 @@ void Tokenize(TokenList &output_tokens, const char *input) {
bool pending_data_token = false;
const char *token_begin = nullptr, *token_end = nullptr;
for (const char *cur = input; *cur; column += (*cur == '\t' ? ASSIMP_FBX_TAB_WIDTH : 1), ++cur) {
const char c = *cur;
// Walk the buffer by explicit index, bounded by 'length', instead of scanning
// for a NUL terminator — replaces the strlen()-style loop that could over-read the buffer.
for (size_t x = 0; x < length; x++) {
const char c = input[x];
const char *cur = &input[x];
column += (c == '\t' ? ASSIMP_FBX_TAB_WIDTH : 1);
if (IsLineEnd(c)) {
comment = false;

View file

@@ -187,7 +187,7 @@ typedef std::vector<TokenPtr> TokenList;
* @param output_tokens Receives a list of all tokens in the input data.
 * @param input_buffer Textual input buffer to be processed; at most @p length bytes are read.
 * @param length Length of the input buffer in bytes.
 * Calls print_error if something goes wrong */
void Tokenize(TokenList &output_tokens, const char *input);
void Tokenize(TokenList &output_tokens, const char *input, size_t length);
/** Tokenizer function for binary FBX files.
*