
Merge pull request #515 from assimp/regression-suite-fixes

Regression suite fixes II
Alexander Gessler, 10 years ago
Parent commit: f96b49a112

+ 1 - 1
code/XGLLoader.cpp

@@ -87,7 +87,7 @@ static const aiImporterDesc desc = {
 	"",
 	"",
 	"",
-	aiImporterFlags_SupportTextFlavour,
+	aiImporterFlags_SupportTextFlavour | aiImporterFlags_SupportCompressedFlavour,
 	0,
 	0,
 	0,

BIN
test/regression/db.zip


+ 2 - 2
test/regression/gen_db.py

@@ -164,7 +164,7 @@ def gen_db(ext_list,outfile):
     num = 0
     for tp in settings.model_directories:
         num += process_dir(tp, outfile,
-            lambda x: os.path.splitext(x)[1] in ext_list)
+            lambda x: os.path.splitext(x)[1].lower() in ext_list and not x in settings.files_to_ignore)
 
     print("="*60)
     print("Updated {0} entries".format(num))
@@ -201,7 +201,7 @@ if __name__ == "__main__":
     if ext_list is None:
         (ext_list, err) = subprocess.Popen([assimp_bin_path, "listext"],
             stdout=subprocess.PIPE).communicate()
-        ext_list = str(ext_list).lower().split(";")
+        ext_list = str(ext_list.strip()).lower().split(";")
 
     # todo: Fix for multi dot extensions like .skeleton.xml
     ext_list = list(filter(lambda f: not f in settings.exclude_extensions,
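
The tightened filter lowercases each extension before matching and also drops anything listed in settings.files_to_ignore. A minimal standalone sketch of that predicate, using made-up ext_list / files_to_ignore values rather than the suite's real settings:

import os

# Illustrative stand-ins for the values gen_db.py gets from "assimp listext" and settings.py.
ext_list = [".obj", ".ply", ".zgl"]
files_to_ignore = ["pond.0.ply"]

def wanted(filename):
    # Case-insensitive extension match, minus explicitly ignored files,
    # mirroring the lambda passed to process_dir above.
    return os.path.splitext(filename)[1].lower() in ext_list \
        and filename not in files_to_ignore

print([f for f in ["Box.OBJ", "scene.zgl", "pond.0.ply", "readme.txt"] if wanted(f)])
# -> ['Box.OBJ', 'scene.zgl']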

+ 9 - 2
test/regression/run.py

@@ -65,7 +65,7 @@ import utils
 # -------------------------------------------------------------------------------
 EXPECTED_FAILURE_NOT_MET, DATABASE_LENGTH_MISMATCH, \
 DATABASE_VALUE_MISMATCH, IMPORT_FAILURE, \
-FILE_NOT_READABLE, COMPARE_SUCCESS = range(6)
+FILE_NOT_READABLE, COMPARE_SUCCESS, EXPECTED_FAILURE = range(7)
 
 messages = collections.defaultdict(lambda: "<unknown", {
         EXPECTED_FAILURE_NOT_MET:
@@ -88,7 +88,10 @@ messages = collections.defaultdict(lambda: "<unknown", {
 
         COMPARE_SUCCESS:
 """Results match archived reference dump in database\n\
-\tNumber of bytes compared: {0}"""
+\tNumber of bytes compared: {0}""",
+
+        EXPECTED_FAILURE:
+"""Expected failure was met.""",
 })
 
 outfilename_output = "run_regression_suite_output.txt"
@@ -211,6 +214,10 @@ def process_dir(d, outfile_results, zipin, result):
                 result.fail(fullpath, outfile_expect, pppreset, EXPECTED_FAILURE_NOT_MET)
                 outfile_results.write("Expected import to fail\n")
                 continue
+            elif failure and r:
+                result.ok(fullpath, pppreset, EXPECTED_FAILURE) 
+                outfile_results.write("Failed as expected, skipping.\n")
+                continue
             
             with open(outfile_expect, "wb") as s:
                 s.write(input_expected) 
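
With the new EXPECTED_FAILURE code, a file that is marked as an expected failure and does fail to import now counts as a pass instead of falling through to the dump comparison. A rough sketch of that decision, with simplified stand-ins for the suite's failure flag and import return code:

# Simplified stand-ins; the real run.py works on result objects and zip entries.
EXPECTED_FAILURE_NOT_MET, EXPECTED_FAILURE, RUN_COMPARISON = range(3)

def classify(expected_to_fail, import_failed):
    # Mirrors the two branches above: a met expectation is OK and skipped,
    # an unmet expectation is an error, everything else gets compared.
    if expected_to_fail and not import_failed:
        return EXPECTED_FAILURE_NOT_MET
    if expected_to_fail and import_failed:
        return EXPECTED_FAILURE
    return RUN_COMPARISON

assert classify(True, True) == EXPECTED_FAILURE
assert classify(True, False) == EXPECTED_FAILURE_NOT_MET
assert classify(False, False) == RUN_COMPARISON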

+ 2 - 2
test/regression/settings.py

@@ -60,9 +60,9 @@ files_to_ignore = ["pond.0.ply"]
 # File extensions are case insensitive
 # -------------------------------------------------------------------------------
 exclude_extensions = [
-    ".lws", ".assbin", ".assxml", ".txt", ".md",
+    ".assbin", ".assxml", ".txt", ".md",
     ".jpeg", ".jpg", ".png", ".gif", ".tga", ".bmp",
-    ".skeleton", ".skeleton.xml", ".license"
+    ".skeleton", ".skeleton.xml", ".license", ".mtl", ".material"
 ]
 
 # -------------------------------------------------------------------------------

+ 2 - 4
test/regression/utils.py

@@ -50,15 +50,13 @@ def hashing(file,pp):
     needs to be persistent across different python implementations
     and platforms, so we implement the hashing manually.
     """
-
+    file = file.lower()
     file = file.replace('\\','/')+":"+pp
     # SDBM hash
     res = 0
     for t in file:
         res = (ord(t) + (res<<6) + (res<<16) - res) % 2**32
-
-    # Python 2.7 normalization: strip 'L' suffix.
-    return hex(res).rstrip('L')
+    return '{:x}'.format(res)
 
 
  # vim: ai ts=4 sts=4 et sw=4
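
Lower-casing the path and formatting the result with '{:x}' keeps the hash identical across platforms and across Python 2/3 (Python 2's hex() appended an 'L' suffix for longs, which the old code had to strip). A self-contained copy of the SDBM routine for quick experimentation:

def sdbm_hash(file, pp):
    # Case- and separator-insensitive key, as in utils.hashing().
    key = file.lower().replace('\\', '/') + ":" + pp
    res = 0
    for ch in key:
        # Classic SDBM recurrence, kept within 32 bits.
        res = (ord(ch) + (res << 6) + (res << 16) - res) % 2**32
    return '{:x}'.format(res)

# The same model hashes identically regardless of path case or slash style.
assert sdbm_hash("Models\\OBJ\\box.obj", "0") == sdbm_hash("models/obj/BOX.OBJ", "0")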

+ 14 - 7
tools/assimp_cmd/CompareDump.cpp

@@ -134,15 +134,16 @@ public:
 		if(it != history.back().second.end()) {
 			++history.back().second[s];
 		}
-		else history.back().second[s] = 1;
+		else history.back().second[s] = 0;
 
 		history.push_back(HistoryEntry(s,PerChunkCounter()));
-
+		debug_trace.push_back("PUSH " + s);
 	}
 
 	/* leave current scope */
 	void pop_elem() {
 		ai_assert(history.size());
+		debug_trace.push_back("POP "+ history.back().first);
 		history.pop_back();
 	}
 
@@ -243,18 +244,22 @@ private:
 		const char* last = history.back().first.c_str();
 		std::string pad;
 
-		for(ChunkHistory::reverse_iterator rev = ++history.rbegin(),
-			end = history.rend(); rev < end; ++rev, pad += "  ")
+		for(ChunkHistory::reverse_iterator rev = history.rbegin(),
+			end = history.rend(); rev != end; ++rev, pad += "  ")
 		{
-			ss << pad << (*rev).first << "(Index: " << (*rev).second[last]-1 << ")" << std::endl;
+			ss << pad << (*rev).first << "(Index: " << (*rev).second[last] << ")" << std::endl;
 			last = (*rev).first.c_str();
 		}
 
+		ss << std::endl << "Debug trace: "<< std::endl;
+		for (std::vector<std::string>::const_iterator it = debug_trace.begin(); it != debug_trace.end(); ++it) {
+			ss << *it << std::endl;
+		}
 		return ss.str();
 	}
 
 
-	/* read from both streams simult.*/
+	/* read from both streams at the same time */
 	template <typename T> void read(T& filla,T& fille) {
 		if(1 != fread(&filla,sizeof(T),1,actual)) {
 			EOFActual();
@@ -291,6 +296,8 @@ private:
 	typedef std::deque<HistoryEntry> ChunkHistory;
 	ChunkHistory history;
 
+	std::vector<std::string> debug_trace;
+
 	typedef std::stack<std::pair<uint32_t,uint32_t> > LengthStack;
 	LengthStack lengths;
 
@@ -739,7 +746,7 @@ void CompareOnTheFlyLight(comparer_context& comp)	{
 	const aiLightSourceType type = static_cast<aiLightSourceType>( 
 		comp.cmp<uint32_t>("mType"));
 
-	if(type==aiLightSource_DIRECTIONAL) {
+	if(type!=aiLightSource_DIRECTIONAL) {
 		comp.cmp<float>("mAttenuationConstant");
 		comp.cmp<float>("mAttenuationLinear");
 		comp.cmp<float>("mAttenuationQuadratic");