# Copyright (C) Microsoft Corporation. All rights reserved.
# This file is distributed under the University of Illinois Open Source License. See LICENSE.TXT for details.

r"""VerifierHelper.py - help with test content used with:
    clang-hlsl-tests /name:VerifierTest.*

This script will produce an HLSL file with expected-error and expected-warning
statements corresponding to actual errors/warnings produced by clang-hlsl-tests.
The new file will be located in %TEMP%, named after the original file, but with
the added extension '.result'.
This can then be compared with the original file (such as varmods-syntax.hlsl)
to see the differences in errors.  It may also be used to replace the original
file, once the correct output behavior is verified.

This script can also be used to do the same with fxc, adding fxc-error and
fxc-warning statements as well.  If errors/warnings/notes were reported by clang,
but nothing was reported by fxc, an "fxc-pass {{}}" entry will be added.  If copied
to the reference file, this means that you sign off on the difference in behavior
between clang and fxc.

In ast mode, this will find the ast subtree corresponding to the line of code preceding
a line containing only: "/*verify-ast", and insert a stripped subtree between this marker
and a line containing only: "*/".  This relies on dxc.exe in the build directory.

This tool expects dxc.exe and clang-hlsl-tests.dll to be in %HLSL_BLD_DIR%\Debug\bin.

Usage:
    VerifierHelper.py clang <testname>  - run test through clang-hlsl-tests and show differences
    VerifierHelper.py fxc <testname>    - run test through fxc and show differences
    VerifierHelper.py ast <testname>    - run test through ast-dump and show differences
    VerifierHelper.py all <testname>    - run test through clang-hlsl-tests, ast-dump, and fxc, then show differences

    <testname> - name of verifier test as passed to "te clang-hlsl-tests.dll /name:VerifierTest::<testname>"
                 Example: RunVarmodsSyntax
                 Can also specify * to run all tests

Environment variables - set these to ensure this tool works properly:
    HLSL_SRC_DIR    - root path of HLSLonLLVM enlistment
    HLSL_BLD_DIR    - path to projects and build output
    HLSL_FXC_PATH   - fxc.exe to use for comparison purposes
    HLSL_DIFF_TOOL  - tool to use for file comparison (optional)
"""

import os, sys, re

try:    DiffTool = os.environ['HLSL_DIFF_TOOL']
except: DiffTool = None
try:    FxcPath = os.environ['HLSL_FXC_PATH']
except: FxcPath = 'fxc'

HlslVerifierTestCpp = os.path.expandvars(r'${HLSL_SRC_DIR}\tools\clang\unittests\HLSL\VerifierTest.cpp')
HlslDataDir = os.path.expandvars(r'${HLSL_SRC_DIR}\tools\clang\test\HLSL')
HlslBinDir = os.path.expandvars(r'${HLSL_BLD_DIR}\Debug\bin')

VerifierTests = {
    'RunArrayIndexOutOfBounds': 'array-index-out-of-bounds-HV-2016.hlsl',
    'RunArrayLength': 'array-length.hlsl',
    'RunAttributes': 'attributes.hlsl',
    'RunBadInclude': 'bad-include.hlsl',
    'RunBinopDims': 'binop-dims.hlsl',
    'RunBuiltinTypesNoInheritance': 'builtin-types-no-inheritance.hlsl',
    'RunCXX11Attributes': 'cxx11-attributes.hlsl',
    'RunConstAssign': 'const-assign.hlsl',
    'RunConstDefault': 'const-default.hlsl',
    'RunConstExpr': 'const-expr.hlsl',
    'RunConversionsBetweenTypeShapes': 'conversions-between-type-shapes.hlsl',
    'RunConversionsNonNumericAggregates': 'conversions-non-numeric-aggregates.hlsl',
    'RunCppErrors': 'cpp-errors.hlsl',
    'RunCppErrorsHV2015': 'cpp-errors-hv2015.hlsl',
    'RunDerivedToBaseCasts': 'derived-to-base.hlsl',
    'RunEffectsSyntax': 'effects-syntax.hlsl',
    'RunEnums': 'enums.hlsl',
    'RunFunctions': 'functions.hlsl',
    'RunImplicitCasts': 'implicit-casts.hlsl',
    'RunIncompleteArray': 'incomp_array_err.hlsl',
    'RunIncompleteType': 'incomplete-type.hlsl',
    'RunIndexingOperator': 'indexing-operator.hlsl',
    'RunIntrinsicExamples': 'intrinsic-examples.hlsl',
    'RunLiterals': 'literals.hlsl',
    'RunMatrixAssignments': 'matrix-assignments.hlsl',
    'RunMatrixSyntax': 'matrix-syntax.hlsl',
    'RunMatrixSyntaxExactPrecision': 'matrix-syntax-exact-precision.hlsl',
    'RunMintypesPromotionWarnings': 'mintypes-promotion-warnings.hlsl',
    'RunMoreOperators': 'more-operators.hlsl',
    'RunObjectOperators': 'object-operators.hlsl',
    'RunPackReg': 'packreg.hlsl',
    'RunRayTracings': 'raytracing.hlsl',
    'RunScalarAssignments': 'scalar-assignments.hlsl',
    'RunScalarAssignmentsExactPrecision': 'scalar-assignments-exact-precision.hlsl',
    'RunScalarOperators': 'scalar-operators.hlsl',
    'RunScalarOperatorsAssign': 'scalar-operators-assign.hlsl',
    'RunScalarOperatorsAssignExactPrecision': 'scalar-operators-assign-exact-precision.hlsl',
    'RunScalarOperatorsExactPrecision': 'scalar-operators-exact-precision.hlsl',
    'RunSemantics': 'semantics.hlsl',
    'RunSizeof': 'sizeof.hlsl',
    'RunString': 'string.hlsl',
    'RunStructAssignments': 'struct-assignments.hlsl',
    'RunSubobjects': 'subobjects-syntax.hlsl',
    'RunTemplateChecks': 'template-checks.hlsl',
    'RunTypemodsSyntax': 'typemods-syntax.hlsl',
    'RunUint4Add3': 'uint4_add3.hlsl',
    'RunVarmodsSyntax': 'varmods-syntax.hlsl',
    'RunVectorAssignments': 'vector-assignments.hlsl',
    'RunVectorConditional': 'vector-conditional.hlsl',
    'RunVectorSyntax': 'vector-syntax.hlsl',
    'RunVectorSyntaxExactPrecision': 'vector-syntax-exact-precision.hlsl',
    'RunVectorSyntaxMix': 'vector-syntax-mix.hlsl',
    'RunWave': 'wave.hlsl',
    }

# The following test(s) do not work in fxc mode:
fxcExcludedTests = [
    'RunCppErrors',
    'RunCppErrorsHV2015',
    'RunCXX11Attributes',
    'RunEnums',
    'RunIncompleteType',
    'RunIntrinsicExamples',
    'RunMatrixSyntaxExactPrecision',
    'RunRayTracings',
    'RunScalarAssignmentsExactPrecision',
    'RunScalarOperatorsAssignExactPrecision',
    'RunScalarOperatorsExactPrecision',
    'RunSubobjects',
    'RunVectorSyntaxExactPrecision',
    'RunWave',
    ]

# rxRUN = re.compile(r'[ RUN ] VerifierTest.(\w+)')             # gtest syntax
rxRUN = re.compile(r'StartGroup: VerifierTest::(\w+)')          # TAEF syntax
rxEndGroup = re.compile(r'EndGroup: VerifierTest::(\w+)\s+\[(\w+)\]')   # TAEF syntax
rxForProgram = re.compile(r'^for program (.*?) with errors\:$')
# rxExpected = re.compile(r"^error\: \'(\w+)\' diagnostics (expected but not seen|seen but not expected)\: $")  # gtest syntax
rxExpected = re.compile(r"^\'(\w+)\' diagnostics (expected but not seen|seen but not expected)\: $")            # TAEF syntax
rxDiagReport = re.compile(r' (?:File (.*?) )?Line (\d+): (.*)$')
rxDiag = re.compile(r'((expected|fxc)-(error|warning|note|pass)\s*\{\{(.*?)\}\}\s*)')
rxFxcErr = re.compile(r'(.+)\((\d+)(?:,(\d+)(?:\-(\d+))?)?\)\: (error|warning) (.*?)\: (.*)')
#   groups = (filename, line, colstart, colend, ew, error_code, error_message)
rxCommentStart = re.compile(r'(//|/\*)')
rxStrings = re.compile(r'(\'|\").*?((?<!\\)\1)')
rxBraces = re.compile(r'(\(|\)|\{|\}|\[|\])')
rxStatementEndOrBlockBegin = re.compile(r'(\;|\{)')
rxLineContinued = re.compile(r'.*\\$')
rxVerifyArguments = re.compile(r'\s*//\s*\:FXC_VERIFY_ARGUMENTS\:\s+(.*)')
rxVerifierTestMethod = re.compile(r'TEST_F\(VerifierTest,\s*(\w+)\)\s*')
rxVerifierTestCheckFile = re.compile(r'CheckVerifiesHLSL\s*\(\s*L?\"([^"]+)"\s*\)')
rxVerifyAst = re.compile(r'^\s*(\/\*verify\-ast)\s*$')      # must start with line containing only "/*verify-ast"
rxEndVerifyAst = re.compile(r'^\s*\*\/\s*$')                # ends with line containing only "*/"
rxAstSourceLocation = re.compile(
    r'''\<(?:(?P<Invalid>\<invalid\ sloc\>) |
           (?:
            (?:(?:(?P<FromFileLine>line|\S*):(?P<FromLine>\d+):(?P<FromLineCol>\d+)) |
               col:(?P<FromCol>\d+)
            )
            (?:,\s+
             (?:(?:(?P<ToFileLine>line|\S*):(?P<ToLine>\d+):(?P<ToLineCol>\d+)) |
                col:(?P<ToCol>\d+)
             )
            )?
           )
         )\>''',
    re.VERBOSE)
rxAstHexAddress = re.compile(r'\b(0x[0-9a-f]+) ?')
rxAstNode = re.compile(r'((?:\<\<\<NULL\>\>\>)|(?:\w+))\s*(.*)')
# matches the leading indentation and tree-graph characters ('|', '`', '-') before each AST node line
rxAstIgnoredIndent = re.compile(r'^(\s+|\||\`|\-)*')
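
# Illustrative (hypothetical) inputs for the two most important patterns above:
#   rxDiag matches individual annotations, normally placed inside /* */ comments in the test files:
#       expected-error {{undeclared identifier}}
#       fxc-warning {{X3206: implicit truncation of vector type}}
#   rxFxcErr matches fxc console output lines such as:
#       foo.hlsl(12,5-8): error X3000: unrecognized identifier 'y'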

# The purpose of StripComments and CountBraces is to support commenting out lines of code so that
# fxc testing can continue even when fxc doesn't recover from an error as well as clang does.  Some
# error lines are at the beginning of a function, where commenting out just that line would comment
# out the start of the function block, but not the body or the end of the block, producing invalid
# syntax.  Here's an example:
#   void foo(error is here) {    /* expected-error {{some expected clang error}} */
#     return;
#   }
# If the first line is commented out without the rest of the function, the result is incorrect code.
# So the intent is to detect when the line being commented out results in unbalanced braces, and then
# use these functions to comment out additional lines until the braces are balanced again.
# This is simple and won't handle the general case, but it should handle the cases in the test files,
# and if not, the tests should be easily modifiable to work with it.
# This still does not handle preprocessor directives, escaped characters (like line ends or escaped
# quotes), or other cases that a real parser would handle.
def StripComments(line, multiline_comment_continued=False):
    """Remove comments from line; returns (stripped_line, multiline_comment_continued),
    where the flag is True if a multiline comment continues beyond this line."""
    if multiline_comment_continued:
        # in a multiline comment, only look for the end of it
        idx = line.find('*/')
        if idx < 0:
            return '', True
        return StripComments(line[idx+2:])
    # look for start of multiline comment or eol comment:
    m = rxCommentStart.search(line)
    if m:
        if m.group(1) == '/*':
            line_end, multiline_comment_continued = StripComments(line[m.end(1):], True)
            return line[:m.start(1)] + line_end, multiline_comment_continued
        elif m.group(1) == '//':
            return line[:m.start(1)], False
    return line, False
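
# A few traced examples of StripComments (illustrative, not executed):
#   StripComments('int x; // trailing')              -> ('int x; ', False)
#   StripComments('a /* b */ c')                     -> ('a  c', False)
#   StripComments('still inside */ int y;', True)    -> (' int y;', False)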

def CountBraces(line, bracestacks):
    "Update bracestacks counts for '()', '{}', and '[]' with the net brace balance on line, skipping string literals"
    m = rxStrings.search(line)
    if m:
        # skip the string literal: count the text before and after it
        CountBraces(line[:m.start(1)], bracestacks)
        CountBraces(line[m.end(2):], bracestacks)
        return
    for b in rxBraces.findall(line):
        if b in '()':
            bracestacks['()'] = bracestacks.get('()', 0) + ((b == '(') and 1 or -1)
        elif b in '{}':
            bracestacks['{}'] = bracestacks.get('{}', 0) + ((b == '{') and 1 or -1)
        elif b in '[]':
            bracestacks['[]'] = bracestacks.get('[]', 0) + ((b == '[') and 1 or -1)
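
# Traced example of CountBraces (illustrative, not executed):
#   bs = {}
#   CountBraces('void foo(int x) {', bs)    # bs -> {'()': 0, '{}': 1}
#   CountBraces('}', bs)                    # bs -> {'()': 0, '{}': 0}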

def ProcessStatementOrBlock(lines, start, fn_process):
    """Apply fn_process to each line of the statement or block starting at lines[start]
    (modifying lines in place); returns the number of lines consumed."""
    num = 0
    # statement_continued is initialized with whether the line has non-whitespace content
    statement_continued = not not StripComments(lines[start], False)[0].strip()
    # Assumes the start of the line is not inside a multiline comment
    multiline_comment_continued = False
    bracestacks = {}
    while start+num < len(lines):
        line = lines[start+num]
        lines[start+num] = fn_process(line)
        num += 1
        line, multiline_comment_continued = StripComments(line, multiline_comment_continued)
        CountBraces(line, bracestacks)
        if (statement_continued and
                not rxStatementEndOrBlockBegin.search(line)):
            continue
        statement_continued = False
        if rxLineContinued.match(line):
            continue
        if (bracestacks.get('{}', 0) < 1 and
                bracestacks.get('()', 0) < 1 and
                bracestacks.get('[]', 0) < 1):
            break
    return num

def CommentStatementOrBlock(lines, start):
    def fn_process(line):
        return '// ' + line
    return ProcessStatementOrBlock(lines, start, fn_process)
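
# Illustrative (hypothetical) use of CommentStatementOrBlock: commenting out a line that opens a
# brace pulls in the rest of the block so the remaining source still has balanced braces:
#   lines = ['void foo(error here) {', '  return;', '}']
#   CommentStatementOrBlock(lines, 0)    # returns 3; every element of lines now starts with '// '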

def ParseVerifierTestCpp():
    "Returns dictionary mapping Run* test name to hlsl filename by parsing VerifierTest.cpp"
    tests = {}
    FoundTest = None
    def fn_null(line):
        return line
    def fn_process(line):
        searching = FoundTest is not None
        if searching:
            m = rxVerifierTestCheckFile.search(line)
            if m:
                tests[FoundTest] = m.group(1)
                searching = False
        return line
    with open(HlslVerifierTestCpp, 'rt') as f:
        lines = f.readlines()
    start = 0
    while start < len(lines):
        m = rxVerifierTestMethod.search(lines[start])
        if m:
            FoundTest = m.group(1)
            start += ProcessStatementOrBlock(lines, start, fn_process)
            if FoundTest not in tests:
                print('Could not parse file for test %s' % FoundTest)
            FoundTest = None
        else:
            start += ProcessStatementOrBlock(lines, start, fn_null)
    return tests
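
# For reference, the declarations ParseVerifierTestCpp pairs up look roughly like this
# (illustrative snippet matching rxVerifierTestMethod and rxVerifierTestCheckFile):
#   TEST_F(VerifierTest, RunVarmodsSyntax) {
#     CheckVerifiesHLSL(L"varmods-syntax.hlsl");
#   }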

class SourceLocation(object):
    def __init__(self, line=None, **kwargs):
        if not kwargs:
            self.Invalid = '<invalid sloc>'
            return
        for key, value in kwargs.items():
            try:    value = int(value)
            except: pass
            setattr(self, key, value)
        if line and not self.FromLine:
            self.FromLine = line
        self.FromCol = self.FromCol or self.FromLineCol
        self.ToCol = self.ToCol or self.ToLineCol
    def Offset(self, offset):
        "Offset From/To Lines by specified value"
        if self.Invalid:
            return
        if self.FromLine:
            self.FromLine = self.FromLine + offset
        if self.ToLine:
            self.ToLine = self.ToLine + offset
    def ToStringAtLine(self, line):
        "convert to string relative to specified line"
        if self.Invalid:
            sloc = self.Invalid
        else:
            if self.FromLine and line != self.FromLine:
                sloc = 'line:%d:%d' % (self.FromLine, self.FromCol)
                line = self.FromLine
            else:
                sloc = 'col:%d' % self.FromCol
            if self.ToCol:
                if self.ToLine and line != self.ToLine:
                    sloc += ', line:%d:%d' % (self.ToLine, self.ToCol)
                else:
                    sloc += ', col:%d' % self.ToCol
        return '<' + sloc + '>'

class AstNode(object):
    def __init__(self, name, sloc, prefix, text, indent=''):
        self.name, self.sloc, self.prefix, self.text, self.indent = name, sloc, prefix, text, indent
        self.children = []
    def ToStringAtLine(self, line):
        "convert to string relative to specified line"
        if self.name == '<<<NULL>>>':
            return self.name
        return ('%s %s%s %s' % (self.name, self.prefix, self.sloc.ToStringAtLine(line), self.text)).strip()
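
# Traced examples of SourceLocation.ToStringAtLine (illustrative):
#   a location starting on the current line renders as '<col:5, col:9>';
#   one starting on a different line renders as '<line:12:3, col:7>'.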

def WalkAstChildren(ast_root):
    "yield each child node in the ast tree in depth-first order"
    for node in ast_root.children:
        yield node
        for child in WalkAstChildren(node):
            yield child

def WriteAstSubtree(ast_root, line, indent=''):
    "Return the stripped text lines for ast_root and its subtree, with locations rendered relative to line"
    output = []
    output.append(indent + ast_root.ToStringAtLine(line))
    if not ast_root.sloc.Invalid and ast_root.sloc.FromLine:
        line = ast_root.sloc.FromLine
    root_indent_len = len(ast_root.indent)
    for child in WalkAstChildren(ast_root):
        output.append(indent + child.indent[root_indent_len:] + child.ToStringAtLine(line))
        if not child.sloc.Invalid and child.sloc.FromLine:
            line = child.sloc.FromLine
    return output

def FindAstNodesByLine(ast_root, line):
    "Return the list of outermost ast nodes whose source location starts on the given line"
    nodes = []
    if not ast_root.sloc.Invalid and ast_root.sloc.FromLine == line:
        return [ast_root]
    if not ast_root.sloc.Invalid and ast_root.sloc.ToLine and ast_root.sloc.ToLine < line:
        return []
    for child in ast_root.children:
        sub_nodes = FindAstNodesByLine(child, line)
        if sub_nodes:
            nodes += sub_nodes
    return nodes

def ParseAst(astlines):
    cur_line = 0    # current source line
    root_node = None
    ast_stack = []  # stack of (node, column) pairs so we can pop the right number of nodes back up the stack
    i = 0           # ast line index
    def push(node, col):
        if ast_stack:
            cur_node, prior_col = ast_stack[-1]
            cur_node.children.append(node)
        ast_stack.append((node, col))
    def popto(col):
        cur_node, prior_col = ast_stack[-1]
        while ast_stack and col <= prior_col:
            ast_stack.pop()
            cur_node, prior_col = ast_stack[-1]
        assert ast_stack
    def parsenode(text, indent):
        m = rxAstNode.match(text)
        if m:
            name = m.group(1)
            text = text[m.end(1):].strip()
        else:
            print('rxAstNode match failed on:\n %s' % text)
            return AstNode('ast-parse-failed', SourceLocation(), '', '', indent)
        text = rxAstHexAddress.sub('', text).strip()
        m = rxAstSourceLocation.search(text)
        if m:
            prefix = text[:m.start()]
            sloc = SourceLocation(cur_line, **m.groupdict())
            text = text[m.end():].strip()
        else:
            prefix = ''
            sloc = SourceLocation()
        return AstNode(name, sloc, prefix, text, indent)
    # Look for TranslationUnitDecl and start from there
    while i < len(astlines):
        text = astlines[i]
        if text.startswith('TranslationUnitDecl'):
            root_node = parsenode(text, '')
            push(root_node, 0)
            break
        i += 1
    i += 1
    # gather ast nodes
    while i < len(astlines):
        line = astlines[i]
        # get starting column and update stack
        m = rxAstIgnoredIndent.match(line)
        indent = ''
        col = 0
        if m:
            indent = m.group(0)
            col = m.end()
        if col == 0:
            break   # at this point we should be done parsing the translation unit!
        popto(col)
        # parse and add the node
        node = parsenode(line[col:], indent)
        if not node:
            print('error parsing line %d:\n%s' % (i+1, line))
            assert False
        push(node, col)
        # update current source line
        sloc = node.sloc
        if not sloc.Invalid and sloc.FromLine:
            cur_line = sloc.FromLine
        i += 1
    return root_node
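
# Shape of the ast-dump text ParseAst consumes (illustrative, not verbatim dxc output):
#   TranslationUnitDecl 0x4f2ea60 <<invalid sloc>>
#   |-FunctionDecl 0x4f2f1b0 <line:1:1, line:3:1> main 'float4 ()'
#   | `-CompoundStmt 0x4f2f2c8 <col:14, line:3:1>
# rxAstIgnoredIndent consumes the leading '|', '`', '-', and spaces; the column where the node
# name begins is what push()/popto() use to rebuild the parent/child relationships.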

class File(object):
    def __init__(self, filename):
        self.filename = filename
        self.expected = {}      # {line_num: [('error' or 'warning', 'error or warning message'), ...], ...}
        self.unexpected = {}    # {line_num: [('error' or 'warning', 'error or warning message'), ...], ...}
        self.last_diag_col = None
    def AddExpected(self, line_num, ew, message):
        self.expected.setdefault(line_num, []).append((ew, message))
    def AddUnexpected(self, line_num, ew, message):
        self.unexpected.setdefault(line_num, []).append((ew, message))
    def MatchDiags(self, line, diags=[], prefix='expected', matchall=False):
        diags = diags[:]
        diag_col = None
        matches = []
        for m in rxDiag.finditer(line):
            if diag_col is None:
                diag_col = m.start()
                self.last_diag_col = diag_col
            if m.group(2) == prefix:
                pattern = m.groups()[2:4]
                for idx, (ew, message) in enumerate(diags):
                    if pattern == (ew, message):
                        matches.append(m)
                        break
                else:
                    if matchall:
                        matches.append(m)
                    continue
                del diags[idx]
        return sorted(matches, key=lambda m: m.start()), diags, diag_col
    def RemoveDiags(self, line, diags, prefix='expected', removeall=False):
        """Removes expected-* diags from line; returns (result_line, remaining_diags, diag_col),
        where result_line is the line without the matching diagnostics,
        remaining_diags is the list of diags not found on the line, and
        diag_col is the column of the first diagnostic found on the line.
        """
        matches, diags, diag_col = self.MatchDiags(line, diags, prefix, removeall)
        for m in reversed(matches):
            line = line[:m.start()] + line[m.end():]
        return line, diags, diag_col
    def AddDiags(self, line, diags, diag_col=None, prefix='expected'):
        "Adds expected-* diags to line."
        if diags:
            if diag_col is None:
                if self.last_diag_col is not None and self.last_diag_col-3 > len(line):
                    diag_col = self.last_diag_col
                else:
                    diag_col = max(len(line) + 7, 63)   # 4 spaces + '/* ', or at column 63, whichever is greater
                line = line + (' ' * ((diag_col - 3) - len(line))) + '/* */'
            for ew, message in reversed(diags):
                line = line[:diag_col] + ('%s-%s {{%s}} ' % (prefix, ew, message)) + line[diag_col:]
        return line.rstrip()
    def SortDiags(self, line):
        matches = list(rxDiag.finditer(line))
        if matches:
            for m in sorted(matches, key=lambda m: m.start(), reverse=True):
                line = line[:m.start()] + line[m.end():]
                diag_col = m.start()
            for m in sorted(matches, key=lambda m: m.groups()[1:], reverse=True):
                line = line[:diag_col] + ('%s-%s {{%s}} ' % m.groups()[1:]) + line[diag_col:]
        return line.rstrip()
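
    # How the helpers above fit together (summary; column numbers traced from AddDiags):
    #   RemoveDiags strips annotations such as 'expected-error {{...}}' from a line,
    #   AddDiags writes annotations back inside a '/* */' comment, reusing the previous diagnostic
    #   column when possible and otherwise padding out to column 63 or just past the end of the
    #   code (whichever is farther), and SortDiags re-orders multiple annotations on one line by
    #   (prefix, kind, message) so that regenerated files diff cleanly against the originals.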

    def OutputResult(self):
        temp_filename = os.path.expandvars(r'${TEMP}\%s' % os.path.split(self.filename)[1])
        with open(self.filename, 'rt') as fin:
            with open(temp_filename+'.result', 'wt') as fout:
                line_num = 0
                for line in fin.readlines():
                    if line[-1] == '\n':
                        line = line[:-1]
                    line_num += 1
                    line, expected, diag_col = self.RemoveDiags(line, self.expected.get(line_num, []))
                    for ew, message in expected:
                        print('Error: Line %d: Could not find: expected-%s {{%s}}!!' % (line_num, ew, message))
                    line = self.AddDiags(line, self.unexpected.get(line_num, []), diag_col)
                    line = self.SortDiags(line)
                    fout.write(line + '\n')

    def TryFxc(self, result_filename=None):
        temp_filename = os.path.expandvars(r'${TEMP}\%s' % os.path.split(self.filename)[1])
        if result_filename is None:
            result_filename = temp_filename + '.fxc'
        inlines = []
        with open(self.filename, 'rt') as fin:
            for line in fin.readlines():
                if line[-1] == '\n':
                    line = line[:-1]
                inlines.append(line)
        verify_arguments = None
        for line in inlines:
            m = rxVerifyArguments.search(line)
            if m:
                verify_arguments = m.group(1)
                print('Found :FXC_VERIFY_ARGUMENTS: %s' % verify_arguments)
                break
        # result will hold the final output after adding fxc error messages;
        # initialize it by removing all the existing fxc diagnostics and noting which lines have expected ones
        result = [(line, None, False) for line in inlines]
        for n, (line, diag_col, expected) in enumerate(result):
            line, diags, diag_col = self.RemoveDiags(line, [], prefix='fxc', removeall=True)
            matches, diags, diag_col2 = self.MatchDiags(line, [], prefix='expected', matchall=True)
            if matches:
                expected = True
            ## if diag_col is None:
            ##     diag_col = diag_col2
            ## elif diag_col2 < diag_col:
            ##     diag_col = diag_col2
            result[n] = (line, diag_col, expected)
        # commented holds the version that gets progressively commented out as fxc reports errors
        commented = inlines[:]
        # diags_by_line maps line_num to a set of (severity, 'code: message') diagnostics
        diags_by_line = {}
        while True:
            with open(temp_filename+'.fxc_temp', 'wt') as fout:
                fout.write('\n'.join(commented))
                if verify_arguments is None:
                    fout.write("\n[numthreads(1,1,1)] void _test_main() { }\n")
            if verify_arguments is None:
                args = '/E _test_main /T cs_5_1'
            else:
                args = verify_arguments
            os.system('%s /nologo "%s.fxc_temp" %s /DVERIFY_FXC=1 /Fo "%s.fxo" /Fe "%s.err" 1> "%s.log" 2>&1' %
                      (FxcPath, temp_filename, args, temp_filename, temp_filename, temp_filename))
            with open(temp_filename+'.err', 'rt') as f:
                errors = [m for m in map(rxFxcErr.match, f.readlines()) if m]
            errors = sorted(errors, key=lambda m: int(m.group(2)))
            first_error = None
            for m in errors:
                line_num = int(m.group(2))
                if not first_error and m.group(5) == 'error':
                    first_error = line_num
                elif first_error and line_num > first_error:
                    break
                diags_by_line.setdefault(line_num, set()).add((m.group(5), m.group(6) + ': ' + m.group(7)))
            if first_error and first_error <= len(commented):
                CommentStatementOrBlock(commented, first_error-1)
            else:
                break
        # Add diagnostic messages from fxc to result:
        self.last_diag_col = None
        for i, (line, diag_col, expected) in enumerate(result):
            line_num = i + 1
            if diag_col:
                self.last_diag_col = diag_col
            diags = diags_by_line.get(line_num, set())
            if not diags:
                if expected:
                    diags.add(('pass', ''))
                else:
                    continue
            diags = sorted(list(diags))
            line = self.SortDiags(self.AddDiags(line, diags, diag_col, prefix='fxc'))
            result[i] = line, diag_col, expected
        with open(result_filename, 'wt') as f:
            f.write('\n'.join(map((lambda res: res[0]), result)))

    def TryAst(self, result_filename=None):
        temp_filename = os.path.expandvars(r'${TEMP}\%s' % os.path.split(self.filename)[1])
        if result_filename is None:
            result_filename = temp_filename + '.ast'
        try:    os.unlink(temp_filename+'.ast_dump')
        except: pass
        try:    os.unlink(result_filename)
        except: pass
        ## result = os.system('%s\\clang.exe -cc1 -fsyntax-only -ast-dump %s 1>"%s.ast_dump" 2>"%s.log"' %
        result = os.system('%s\\dxc.exe -ast-dump %s -E main -T ps_5_0 1>"%s.ast_dump" 2>"%s.log"' %
                           (HlslBinDir, self.filename, temp_filename, temp_filename))
        # dxc dumps the ast even when there are syntax errors; if any error occurred, dxc returns a nonzero exit code.
        if not os.path.isfile(temp_filename+'.ast_dump'):
            print('ast-dump failed, see log:\n %s.log' % (temp_filename))
            return
        ## elif result:
        ##     print('ast-dump succeeded, but exited with error code %d, see log:\n %s.log' % (result, temp_filename))
        astlines = []
        with open(temp_filename+'.ast_dump', 'rt') as fin:
            for line in fin.readlines():
                if line[-1] == '\n':
                    line = line[:-1]
                astlines.append(line)
        try:
            ast_root = ParseAst(astlines)
        except:
            print('ParseAst failed on "%s"' % (temp_filename + '.ast_dump'))
            raise
        inlines = []
        with open(self.filename, 'rt') as fin:
            for line in fin.readlines():
                if line[-1] == '\n':
                    line = line[:-1]
                inlines.append(line)
        outlines = []
        i = 0
        while i < len(inlines):
            line = inlines[i]
            outlines.append(line)
            m = rxVerifyAst.match(line)
            if m:
                indent = line[:m.start(1)] + ' '
                # at this point i is the ONE-based source line number of the line being verified
                # (since it's one past that line in the zero-based index)
                ast_nodes = FindAstNodesByLine(ast_root, i)
                if not ast_nodes:
                    outlines += [indent + 'No matching AST found for line!']
                else:
                    for ast in ast_nodes:
                        outlines += WriteAstSubtree(ast, i, indent)
                # skip the old contents of the verify-ast block; they are replaced by the lines just added
                while i+1 < len(inlines) and not rxEndVerifyAst.match(inlines[i+1]):
                    i += 1
            i += 1
        with open(result_filename, 'wt') as f:
            f.write('\n'.join(outlines))
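
    # Illustrative verify-ast block as it appears in a test .hlsl file (hypothetical content):
    #   float4 g_var;
    #   /*verify-ast
    #     VarDecl <col:1, col:8> g_var 'float4'
    #   */
    # TryAst regenerates the lines between '/*verify-ast' and '*/' from the current ast-dump output.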

def ProcessVerifierOutput(lines):
    files = {}
    cur_filename = None
    cur_test = None
    state = 'WaitingForFile'
    ew = ''
    expected = None
    for line in lines:
        if not line:
            continue
        if line[-1] == '\n':
            line = line[:-1]
        m = rxRUN.match(line)
        if m:
            cur_test = m.group(1)
        m = rxForProgram.match(line)
        if m:
            cur_filename = m.group(1)
            files[cur_filename] = File(cur_filename)
            state = 'WaitingForCategory'
            continue
        if state == 'WaitingForFile':
            m = rxEndGroup.match(line)
            if m and m.group(2) == 'Failed':
                # This usually happens when the compiler crashes
                print('Fatal Error: test %s failed without verifier results.' % cur_test)
        if state == 'WaitingForCategory' or state == 'ReadingErrors':
            m = rxExpected.match(line)
            if m:
                ew = m.group(1)
                expected = m.group(2) == 'expected but not seen'
                state = 'ReadingErrors'
                continue
        if state == 'ReadingErrors':
            m = rxDiagReport.match(line)
            if m:
                line_num = int(m.group(2))
                if expected:
                    files[cur_filename].AddExpected(line_num, ew, m.group(3))
                else:
                    files[cur_filename].AddUnexpected(line_num, ew, m.group(3))
                continue
    for f in files.values():
        f.OutputResult()
    return files

def maybe_compare(filename1, filename2):
    with open(filename1, 'rt') as fbefore:
        with open(filename2, 'rt') as fafter:
            before = fbefore.read()
            after = fafter.read()
    if before.strip() != after.strip():
        print('Differences found. Compare:\n %s\nwith:\n %s' % (filename1, filename2))
        if DiffTool:
            os.system('%s %s %s' % (DiffTool, filename1, filename2))
        return True
    return False

def PrintUsage():
    print(__doc__)
    print('Available tests and corresponding files:')
    tests = sorted(VerifierTests.keys())
    width = len(max(tests, key=len))
    for name in tests:
        print((' %%-%ds %%s' % width) % (name, VerifierTests[name]))
    print('Tests incompatible with fxc mode:')
    for name in fxcExcludedTests:
        print(' %s' % name)

def RunVerifierTest(test, HlslDataDir=HlslDataDir):
    import codecs
    temp_filename = os.path.expandvars(r'${TEMP}\VerifierHelper_temp.txt')
    cmd = ('te %s\\clang-hlsl-tests.dll /p:"HlslDataDir=%s" /name:VerifierTest::%s > %s' %
           (HlslBinDir, HlslDataDir, test, temp_filename))
    print(cmd)
    os.system(cmd)  # TAEF test
    # TAEF outputs unicode, so read as binary and convert:
    with open(temp_filename, 'rb') as f:
        return codecs.decode(f.read(), 'UTF-16').replace(u'\x7f', u'').replace(u'\r\n', u'\n').splitlines()

def main(*args):
    global VerifierTests
    try:
        VerifierTests = ParseVerifierTestCpp()
    except:
        print('Unable to parse tests from VerifierTest.cpp; using defaults')
    if len(args) < 1 or (args[0][0] in '-/' and args[0][1:].lower() in ('h', '?', 'help')):
        PrintUsage()
        return -1
    mode = args[0]
    if mode == 'fxc':
        allFxcTests = sorted(filter(lambda key: key not in fxcExcludedTests, VerifierTests.keys()))
        if args[1] == '*':
            tests = allFxcTests
        else:
            if args[1] not in allFxcTests:
                PrintUsage()
                return -1
            tests = [args[1]]
        differences = False
        for test in tests:
            print('---- %s ----' % test)
            filename = os.path.join(HlslDataDir, VerifierTests[test])
            result_filename = os.path.expandvars(r'${TEMP}\%s.fxc' % os.path.split(filename)[1])
            File(filename).TryFxc()
            differences = maybe_compare(filename, result_filename) or differences
        if not differences:
            print('No differences found!')
    elif mode == 'clang':
        if args[1] != '*' and args[1] not in VerifierTests:
            PrintUsage()
            return -1
        files = ProcessVerifierOutput(RunVerifierTest(args[1]))
        differences = False
        if files:
            for f in files.values():
                if f.expected or f.unexpected:
                    result_filename = os.path.expandvars(r'${TEMP}\%s.result' % os.path.split(f.filename)[1])
                    differences = maybe_compare(f.filename, result_filename) or differences
        if not differences:
            print('No differences found!')
    elif mode == 'ast':
        allAstTests = sorted(VerifierTests.keys())
        if args[1] == '*':
            tests = allAstTests
        else:
            if args[1] not in allAstTests:
                PrintUsage()
                return -1
            tests = [args[1]]
        differences = False
        for test in tests:
            print('---- %s ----' % test)
            filename = os.path.join(HlslDataDir, VerifierTests[test])
            result_filename = os.path.expandvars(r'${TEMP}\%s.ast' % os.path.split(filename)[1])
            File(filename).TryAst()
            differences = maybe_compare(filename, result_filename) or differences
        if not differences:
            print('No differences found!')
    elif mode == 'all':
        allTests = sorted(VerifierTests.keys())
        if args[1] == '*':
            tests = allTests
        else:
            if args[1] not in allTests:
                PrintUsage()
                return -1
            tests = [args[1]]
        # Do clang verifier tests, updating source file paths for changed files:
        sourceFiles = dict([(VerifierTests[test], os.path.join(HlslDataDir, VerifierTests[test])) for test in tests])
        files = ProcessVerifierOutput(RunVerifierTest(args[1]))
        if files:
            for f in files.values():
                if f.expected or f.unexpected:
                    name = os.path.split(f.filename)[1]
                    sourceFiles[name] = os.path.expandvars(r'${TEMP}\%s.result' % name)
        # update verify-ast blocks:
        for name, sourceFile in sourceFiles.items():
            result_filename = os.path.expandvars(r'${TEMP}\%s.ast' % name)
            File(sourceFile).TryAst(result_filename)
            sourceFiles[name] = result_filename
        # now do fxc verification and final comparison
        differences = False
        fxcExcludedFiles = [VerifierTests[test] for test in fxcExcludedTests]
        width = len(max(tests, key=len))
        for test in tests:
            name = VerifierTests[test]
            sourceFile = sourceFiles[name]
            print(('Test %%-%ds - %%s' % width) % (test, name))
            result_filename = os.path.expandvars(r'${TEMP}\%s.fxc' % name)
            if name not in fxcExcludedFiles:
                File(sourceFile).TryFxc(result_filename)
                sourceFiles[name] = result_filename
            differences = maybe_compare(os.path.join(HlslDataDir, name), sourceFiles[name]) or differences
        if not differences:
            print('No differences found!')
    else:
        PrintUsage()
        return -1
    return 0

if __name__ == '__main__':
    sys.exit(main(*sys.argv[1:]))