
Support for FBX files larger than 4GB

Mike Samsonov 6 years ago
parent
commit
c58ed8da22
5 changed files with 15 additions and 15 deletions
  1. code/FBXBinaryTokenizer.cpp (+6 -6)
  2. code/FBXImporter.cpp (+1 -1)
  3. code/FBXTokenizer.h (+6 -6)
  4. code/FBXUtil.cpp (+1 -1)
  5. code/FBXUtil.h (+1 -1)

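For context on why the parameter types change from unsigned int to size_t: a 32-bit offset or length wraps around once a binary FBX file grows past UINT32_MAX bytes (4 GiB), so every position the tokenizer computes beyond that point is wrong. A minimal sketch of the failure mode, using a hypothetical 5 GiB offset on a 64-bit target (illustration only, not code from this commit):

    #include <cstddef>
    #include <iostream>

    int main() {
        // Hypothetical cursor position 5 GiB into a loaded FBX buffer.
        const std::size_t real_offset = 5ull * 1024 * 1024 * 1024;

        // Old behaviour: narrowing to unsigned int silently wraps modulo 2^32.
        const unsigned int truncated = static_cast<unsigned int>(real_offset);

        // New behaviour: size_t keeps the full value on 64-bit targets.
        std::cout << "size_t offset: " << real_offset << "\n"   // 5368709120
                  << "uint32 offset: " << truncated   << "\n";  // 1073741824
    }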
+ 6 - 6
code/FBXBinaryTokenizer.cpp

@@ -98,7 +98,7 @@ namespace FBX {
 //	return (flags & to_check) != 0;
 //}
 // ------------------------------------------------------------------------------------------------
-Token::Token(const char* sbegin, const char* send, TokenType type, unsigned int offset)
+Token::Token(const char* sbegin, const char* send, TokenType type, size_t offset)
     :
     #ifdef DEBUG
     contents(sbegin, static_cast<size_t>(send-sbegin)),
@@ -122,18 +122,18 @@ namespace {

 // ------------------------------------------------------------------------------------------------
 // signal tokenization error, this is always unrecoverable. Throws DeadlyImportError.
-AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offset) AI_WONT_RETURN_SUFFIX;
-AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offset)
+AI_WONT_RETURN void TokenizeError(const std::string& message, size_t offset) AI_WONT_RETURN_SUFFIX;
+AI_WONT_RETURN void TokenizeError(const std::string& message, size_t offset)
 {
     throw DeadlyImportError(Util::AddOffset("FBX-Tokenize",message,offset));
 }


 // ------------------------------------------------------------------------------------------------
-uint32_t Offset(const char* begin, const char* cursor) {
+size_t Offset(const char* begin, const char* cursor) {
     ai_assert(begin <= cursor);

-    return static_cast<unsigned int>(cursor - begin);
+    return cursor - begin;
 }

 // ------------------------------------------------------------------------------------------------
@@ -424,7 +424,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

 // ------------------------------------------------------------------------------------------------
 // TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
-void TokenizeBinary(TokenList& output_tokens, const char* input, unsigned int length)
+void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
 {
     ai_assert(input);

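The Offset() helper above now returns the full pointer difference instead of forcing it through a 32-bit cast. A standalone sketch of the widened helper, with plain assert standing in for Assimp's ai_assert macro:

    #include <cassert>
    #include <cstddef>

    // The pointer difference is a ptrdiff_t and converts to size_t without
    // loss as long as begin <= cursor, so offsets beyond 4 GiB survive intact.
    std::size_t Offset(const char* begin, const char* cursor) {
        assert(begin <= cursor);
        return static_cast<std::size_t>(cursor - begin);
    }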
+ 1 - 1
code/FBXImporter.cpp

@@ -172,7 +172,7 @@ void FBXImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
         bool is_binary = false;
         if (!strncmp(begin,"Kaydara FBX Binary",18)) {
             is_binary = true;
-            TokenizeBinary(tokens,begin,static_cast<unsigned int>(contents.size()));
+            TokenizeBinary(tokens,begin,contents.size());
         }
         else {
             Tokenize(tokens,begin);

+ 6 - 6
code/FBXTokenizer.h

@@ -93,7 +93,7 @@ public:
     Token(const char* sbegin, const char* send, TokenType type, unsigned int line, unsigned int column);

     /** construct a binary token */
-    Token(const char* sbegin, const char* send, TokenType type, unsigned int offset);
+    Token(const char* sbegin, const char* send, TokenType type, size_t offset);

     ~Token();

@@ -118,14 +118,14 @@ public:
         return type;
     }

-    unsigned int Offset() const {
+    size_t Offset() const {
         ai_assert(IsBinary());
         return offset;
     }

     unsigned int Line() const {
         ai_assert(!IsBinary());
-        return line;
+        return static_cast<unsigned int>(line);
     }

     unsigned int Column() const {
@@ -147,8 +147,8 @@ private:
     const TokenType type;

     union {
-        const unsigned int line;
-        unsigned int offset;
+        size_t line;
+        size_t offset;
     };
     const unsigned int column;
 };
@@ -178,7 +178,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
  * @param input_buffer Binary input buffer to be processed.
  * @param length Length of input buffer, in bytes. There is no 0-terminal.
  * @throw DeadlyImportError if something goes wrong */
-void TokenizeBinary(TokenList& output_tokens, const char* input, unsigned int length);
+void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);


 } // ! FBX

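Since the anonymous union in Token now stores both line and offset as size_t, the text-mode accessor Line() has to narrow back to its unchanged unsigned int return type, which is what the added static_cast does. A reduced, hypothetical illustration of that storage layout (not the full Token class):

    #include <cstddef>

    // Reduced sketch of the Token storage after this commit: one 64-bit field
    // serves as the line number (text FBX) or the byte offset (binary FBX).
    struct TokenPosition {
        union {
            std::size_t line;    // text tokens: 1-based line number
            std::size_t offset;  // binary tokens: byte offset into the file
        };

        // The public API keeps returning unsigned int, so the value is narrowed
        // here; line numbers are assumed to stay far below 2^32 in practice.
        unsigned int Line() const { return static_cast<unsigned int>(line); }
        std::size_t Offset() const { return offset; }
    };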
+ 1 - 1
code/FBXUtil.cpp

@@ -86,7 +86,7 @@ const char* TokenTypeString(TokenType t)
 
 
 // ------------------------------------------------------------------------------------------------
-std::string AddOffset(const std::string& prefix, const std::string& text, unsigned int offset)
+std::string AddOffset(const std::string& prefix, const std::string& text, size_t offset)
 {
     return static_cast<std::string>( (Formatter::format() << prefix << " (offset 0x" << std::hex << offset << ") " << text) );
 }

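Because the Formatter simply streams whatever integer it receives, widening the offset parameter is all that is needed for error messages to report positions beyond 4 GiB. A rough approximation of AddOffset() using std::ostringstream in place of Assimp's Formatter::format():

    #include <cstddef>
    #include <iostream>
    #include <sstream>
    #include <string>

    // Approximation of AddOffset(); the real function lives in code/FBXUtil.cpp.
    std::string AddOffset(const std::string& prefix, const std::string& text, std::size_t offset) {
        std::ostringstream ss;
        ss << prefix << " (offset 0x" << std::hex << offset << ") " << text;
        return ss.str();
    }

    int main() {
        // An offset just past 4 GiB, which a 32-bit parameter could not carry.
        std::cout << AddOffset("FBX-Tokenize", "unexpected end of file", 0x100000010ull) << "\n";
        // Prints: FBX-Tokenize (offset 0x100000010) unexpected end of file
    }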
+ 1 - 1
code/FBXUtil.h

@@ -78,7 +78,7 @@ const char* TokenTypeString(TokenType t);
 *  @param line Line index, 1-based
 *  @param column Column index, 1-based
 *  @return A string of the following format: {prefix} (offset 0x{offset}) {text}*/
-std::string AddOffset(const std::string& prefix, const std::string& text, unsigned int offset);
+std::string AddOffset(const std::string& prefix, const std::string& text, size_t offset);


 /** Format log/error messages using a given line location in the source file.