@@ -96,7 +96,8 @@ AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int line,
 
 // process a potential data token up to 'cur', adding it to 'output_tokens'.
 // ------------------------------------------------------------------------------------------------
-void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*& end,
+void ProcessDataToken(TokenList &output_tokens, StackAllocator &token_allocator,
+        const char*& start, const char*& end,
                       unsigned int line,
                       unsigned int column,
                       TokenType type = TokenType_DATA,
@@ -133,8 +134,7 @@ void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*
 }
 
 // ------------------------------------------------------------------------------------------------
-void Tokenize(TokenList& output_tokens, const char* input)
-{
+void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &token_allocator) {
     ai_assert(input);
     ASSIMP_LOG_DEBUG("Tokenizing ASCII FBX file");
 
@@ -166,7 +166,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
                 in_double_quotes = false;
                 token_end = cur;
 
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
                 pending_data_token = false;
             }
             continue;
@@ -183,30 +183,30 @@ void Tokenize(TokenList& output_tokens, const char* input)
             continue;
 
         case ';':
-            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
             comment = true;
             continue;
 
         case '{':
-            ProcessDataToken(output_tokens,token_begin,token_end, line, column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
             output_tokens.push_back(new_Token(cur,cur+1,TokenType_OPEN_BRACKET,line,column));
             continue;
 
         case '}':
-            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
             output_tokens.push_back(new_Token(cur,cur+1,TokenType_CLOSE_BRACKET,line,column));
             continue;
 
         case ',':
             if (pending_data_token) {
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_DATA,true);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_DATA, true);
             }
             output_tokens.push_back(new_Token(cur,cur+1,TokenType_COMMA,line,column));
             continue;
 
         case ':':
             if (pending_data_token) {
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_KEY,true);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_KEY, true);
             }
             else {
                 TokenizeError("unexpected colon", line, column);
@@ -228,7 +228,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
                     }
                 }
 
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,type);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, type);
             }
 
             pending_data_token = false;
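
Taken together, the hunks thread a caller-supplied StackAllocator through the ASCII tokenizer so that every ProcessDataToken call can place the tokens it produces in arena storage rather than on the global heap. The new_Token calls in the context lines take no allocator argument, which suggests they pick up token_allocator from the enclosing scope (e.g. via a macro). The sketch below only illustrates that allocation pattern with the allocator made explicit; the StackAllocator interface and Token layout here are assumptions for illustration, not assimp's actual declarations.

#include <cstddef>
#include <new>

// Minimal bump-pointer arena. Allocate() is an assumed interface, not
// assimp's real StackAllocator API. (Alignment handling omitted for brevity.)
class StackAllocator {
public:
    StackAllocator(void *buffer, std::size_t size)
        : cur_(static_cast<char *>(buffer)), end_(cur_ + size) {}

    void *Allocate(std::size_t n) {
        if (static_cast<std::size_t>(end_ - cur_) < n) {
            return nullptr; // arena exhausted; real code might grow or throw
        }
        void *p = cur_;
        cur_ += n;
        return p;
    }

private:
    char *cur_;
    char *end_;
};

// Stand-in token type; the real FBX Token carries more state.
struct Token {
    const char *begin;
    const char *end;
    int type;
    unsigned int line;
    unsigned int column;
};

// Allocator-aware analogue of a plain `new Token(...)`: the token is
// placement-constructed inside the arena, so the tokenizer never frees
// tokens individually.
inline Token *new_Token(StackAllocator &alloc, const char *b, const char *e,
                        int type, unsigned int line, unsigned int column) {
    void *mem = alloc.Allocate(sizeof(Token));
    return mem ? new (mem) Token{b, e, type, line, column} : nullptr;
}

The payoff of this pattern is that all tokens from one file can be discarded with a single arena reset instead of thousands of individual deletes, which is presumably why the allocator is threaded through every ProcessDataToken call site rather than stored globally.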