Merge branch 'master' into windows-llvm-13.0.0

gingerBill, 2 years ago
commit 2242ffcca6
46 changed files with 908 additions and 706 deletions
  1. build.bat (+4 -1)
  2. build_odin.sh (+7 -4)
  3. core/compress/zlib/zlib.odin (+1 -0)
  4. core/encoding/entity/entity.odin (+17 -20)
  5. core/encoding/xml/debug_print.odin (+21 -23)
  6. core/encoding/xml/example/xml_example.odin (+3 -3)
  7. core/encoding/xml/helpers.odin (+17 -12)
  8. core/encoding/xml/tokenizer.odin (+16 -16)
  9. core/encoding/xml/xml_reader.odin (+15 -27)
  10. core/fmt/fmt.odin (+11 -11)
  11. core/image/netpbm/helpers.odin (+8 -7)
  12. core/image/netpbm/netpbm.odin (+1 -0)
  13. core/image/png/helpers.odin (+3 -4)
  14. core/image/png/png.odin (+8 -8)
  15. core/math/math.odin (+12 -12)
  16. core/mem/allocators.odin (+52 -52)
  17. core/net/url.odin (+35 -35)
  18. core/odin/printer/visit.odin (+86 -92)
  19. core/odin/tokenizer/tokenizer.odin (+16 -16)
  20. core/runtime/dynamic_map_internal.odin (+118 -98)
  21. core/runtime/error_checks.odin (+9 -9)
  22. core/runtime/print.odin (+6 -6)
  23. core/text/i18n/i18n.odin (+2 -0)
  24. core/text/i18n/qt_linguist.odin (+34 -8)
  25. core/thread/thread_windows.odin (+2 -2)
  26. core/time/time.odin (+19 -17)
  27. examples/demo/demo.odin (+1 -0)
  28. src/build_settings.cpp (+33 -2)
  29. src/check_decl.cpp (+2 -2)
  30. src/check_expr.cpp (+3 -3)
  31. src/check_stmt.cpp (+6 -0)
  32. src/check_type.cpp (+6 -0)
  33. src/checker.cpp (+43 -9)
  34. src/checker.hpp (+3 -0)
  35. src/main.cpp (+50 -7)
  36. src/parser.cpp (+85 -0)
  37. src/parser.hpp (+2 -0)
  38. tests/core/encoding/hxa/test_core_hxa.odin (+14 -20)
  39. tests/core/encoding/xml/test_core_xml.odin (+33 -36)
  40. tests/core/math/linalg/glsl/test_linalg_glsl_math.odin (+4 -10)
  41. tests/core/math/test_core_math.odin (+73 -94)
  42. tests/core/path/filepath/test_core_filepath.odin (+6 -12)
  43. tests/core/reflect/test_core_reflect.odin (+16 -20)
  44. tests/core/text/i18n/test_core_text_i18n.odin (+2 -6)
  45. vendor/darwin/Metal/MetalClasses.odin (+2 -2)
  46. vendor/fontstash/fontstash.odin (+1 -0)

+ 4 - 1
build.bat

@@ -51,7 +51,10 @@ set compiler_flags= -nologo -Oi -TP -fp:precise -Gm- -MP -FC -EHsc- -GR- -GF
 set compiler_defines= -DODIN_VERSION_RAW=\"%odin_version_raw%\"
 
 if not exist .git\ goto skip_git_hash
-for /f %%i in ('git rev-parse --short HEAD') do set GIT_SHA=%%i
+for /f "tokens=1,2" %%i IN ('git show "--pretty=%%cd %%h" "--date=format:%%Y-%%m" --no-patch --no-notes HEAD') do (
+	set odin_version_raw=dev-%%i
+	set GIT_SHA=%%j
+)
 if %ERRORLEVEL% equ 0 set compiler_defines=%compiler_defines% -DGIT_SHA=\"%GIT_SHA%\"
 :skip_git_hash
 

+ 7 - 4
build_odin.sh

@@ -8,17 +8,20 @@ set -eu
 : ${ODIN_VERSION=dev-$(date +"%Y-%m")}
 : ${GIT_SHA=}
 
-CPPFLAGS="$CPPFLAGS -DODIN_VERSION_RAW=\"$ODIN_VERSION\""
 CXXFLAGS="$CXXFLAGS -std=c++14"
 LDFLAGS="$LDFLAGS -pthread -lm -lstdc++"
 
-if [ -d ".git" ]; then
-	GIT_SHA=$(git rev-parse --short HEAD || :)
-	if [ "$GIT_SHA" ]; then
+if [ -d ".git" ] && [ $(which git) ]; then
+	versionTag=( $(git show --pretty='%cd %h' --date=format:%Y-%m --no-patch --no-notes HEAD) )
+	if [ $? -eq 0 ]; then
+		ODIN_VERSION="dev-${versionTag[0]}"
+		GIT_SHA="${versionTag[1]}"
 		CPPFLAGS="$CPPFLAGS -DGIT_SHA=\"$GIT_SHA\""
 	fi
 fi
 
+CPPFLAGS="$CPPFLAGS -DODIN_VERSION_RAW=\"$ODIN_VERSION\""
+
 DISABLED_WARNINGS="-Wno-switch -Wno-macro-redefined -Wno-unused-value"
 OS=$(uname)
 

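Note: both build scripts now derive ODIN_VERSION from the commit date rather than only embedding the short hash. A rough sketch of what the added git show invocation prints and how build_odin.sh consumes it (the date and hash values below are placeholders, not taken from this commit):

	# Prints "<commit year-month> <abbreviated hash>" on one line, e.g. "2022-03 0123abc" (placeholder values).
	git show --pretty='%cd %h' --date=format:%Y-%m --no-patch --no-notes HEAD

	# The script word-splits that output into a two-element array:
	versionTag=( $(git show --pretty='%cd %h' --date=format:%Y-%m --no-patch --no-notes HEAD) )
	ODIN_VERSION="dev-${versionTag[0]}"   # e.g. dev-2022-03
	GIT_SHA="${versionTag[1]}"            # e.g. 0123abc
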
+ 1 - 0
core/compress/zlib/zlib.odin

@@ -1,3 +1,4 @@
+//+vet !using-param
 package zlib
 
 /*

+ 17 - 20
core/encoding/entity/entity.odin

@@ -184,28 +184,26 @@ decode_xml :: proc(input: string, options := XML_Decode_Options{}, allocator :=
 
 advance :: proc(t: ^Tokenizer) -> (err: Error) {
 	if t == nil { return .Tokenizer_Is_Nil }
-	using t
-
 	#no_bounds_check {
-		if read_offset < len(src) {
-			offset = read_offset
-			r, w   = rune(src[read_offset]), 1
+		if t.read_offset < len(t.src) {
+			t.offset = t.read_offset
+			t.r, t.w   = rune(t.src[t.read_offset]), 1
 			switch {
-			case r == 0:
+			case t.r == 0:
 				return .Illegal_NUL_Character
-			case r >= utf8.RUNE_SELF:
-				r, w = utf8.decode_rune_in_string(src[read_offset:])
-				if r == utf8.RUNE_ERROR && w == 1 {
+			case t.r >= utf8.RUNE_SELF:
+				t.r, t.w = utf8.decode_rune_in_string(t.src[t.read_offset:])
+				if t.r == utf8.RUNE_ERROR && t.w == 1 {
 					return .Illegal_UTF_Encoding
-				} else if r == utf8.RUNE_BOM && offset > 0 {
+				} else if t.r == utf8.RUNE_BOM && t.offset > 0 {
 					return .Illegal_BOM
 				}
 			}
-			read_offset += w
+			t.read_offset += t.w
 			return .None
 		} else {
-			offset = len(src)
-			r = -1
+			t.offset = len(t.src)
+			t.r = -1
 			return
 		}
 	}
@@ -273,26 +271,25 @@ _extract_xml_entity :: proc(t: ^Tokenizer) -> (entity: string, err: Error) {
 		All of these would be in the ASCII range.
 		Even if one is not, it doesn't matter. All characters we need to compare to extract are.
 	*/
-	using t
 
 	length := len(t.src)
 	found  := false
 
 	#no_bounds_check {
-		for read_offset < length {
-			if src[read_offset] == ';' {
+		for t.read_offset < length {
+			if t.src[t.read_offset] == ';' {
+				t.read_offset += 1
 				found = true
-				read_offset += 1
 				break
 			}
-			read_offset += 1
+			t.read_offset += 1
 		}
 	}
 
 	if found {
-		return string(src[offset + 1 : read_offset - 1]), .None
+		return string(t.src[t.offset + 1 : t.read_offset - 1]), .None
 	}
-	return string(src[offset : read_offset]), .Invalid_Entity_Encoding
+	return string(t.src[t.offset : t.read_offset]), .Invalid_Entity_Encoding
 }
 
 /*

+ 21 - 23
core/encoding/xml/debug_print.odin

@@ -19,43 +19,39 @@ import "core:fmt"
 */
 print :: proc(writer: io.Writer, doc: ^Document) -> (written: int, err: io.Error) {
 	if doc == nil { return }
-	using fmt
-
-	written += wprintf(writer, "[XML Prolog]\n")
+	written += fmt.wprintf(writer, "[XML Prolog]\n")
 
 	for attr in doc.prologue {
-		written += wprintf(writer, "\t%v: %v\n", attr.key, attr.val)
+		written += fmt.wprintf(writer, "\t%v: %v\n", attr.key, attr.val)
 	}
 
-	written += wprintf(writer, "[Encoding] %v\n", doc.encoding)
+	written += fmt.wprintf(writer, "[Encoding] %v\n", doc.encoding)
 
 	if len(doc.doctype.ident) > 0 {
-		written += wprintf(writer, "[DOCTYPE]  %v\n", doc.doctype.ident)
+		written += fmt.wprintf(writer, "[DOCTYPE]  %v\n", doc.doctype.ident)
 
 		if len(doc.doctype.rest) > 0 {
-		 	wprintf(writer, "\t%v\n", doc.doctype.rest)
+		 	fmt.wprintf(writer, "\t%v\n", doc.doctype.rest)
 		}
 	}
 
 	for comment in doc.comments {
-		written += wprintf(writer, "[Pre-root comment]  %v\n", comment)
+		written += fmt.wprintf(writer, "[Pre-root comment]  %v\n", comment)
 	}
 
 	if len(doc.elements) > 0 {
-	 	wprintln(writer, " --- ")
+	 	fmt.wprintln(writer, " --- ")
 	 	print_element(writer, doc, 0)
-	 	wprintln(writer, " --- ")
+	 	fmt.wprintln(writer, " --- ")
 	 }
 
 	return written, .None
 }
 
 print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID, indent := 0) -> (written: int, err: io.Error) {
-	using fmt
-
 	tab :: proc(writer: io.Writer, indent: int) {
 		for _ in 0..=indent {
-			wprintf(writer, "\t")
+			fmt.wprintf(writer, "\t")
 		}
 	}
 
@@ -64,22 +60,24 @@ print_element :: proc(writer: io.Writer, doc: ^Document, element_id: Element_ID,
 	element := doc.elements[element_id]
 
 	if element.kind == .Element {
-		wprintf(writer, "<%v>\n", element.ident)
-		if len(element.value) > 0 {
-			tab(writer, indent + 1)
-			wprintf(writer, "[Value] %v\n", element.value)
+		fmt.wprintf(writer, "<%v>\n", element.ident)
+
+		for value in element.value {
+			switch v in value {
+			case string:
+				tab(writer, indent + 1)
+				fmt.wprintf(writer, "[Value] %v\n", v)
+			case Element_ID:
+				print_element(writer, doc, v, indent + 1)
+			}
 		}
 
 		for attr in element.attribs {
 			tab(writer, indent + 1)
-			wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val)
-		}
-
-		for child in element.children {
-			print_element(writer, doc, child, indent + 1)
+			fmt.wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val)
 		}
 	} else if element.kind == .Comment {
-		wprintf(writer, "[COMMENT] %v\n", element.value)
+		fmt.wprintf(writer, "[COMMENT] %v\n", element.value)
 	}
 
 	return written, .None

+ 3 - 3
core/encoding/xml/example/xml_example.odin

@@ -72,10 +72,10 @@ example :: proc() {
 	 	return
 	}
 
-	printf("Found `<charlist>` with %v children, %v elements total\n", len(docs[0].elements[charlist].children), docs[0].element_count)
+	printf("Found `<charlist>` with %v children, %v elements total\n", len(docs[0].elements[charlist].value), docs[0].element_count)
 
-	crc32 := doc_hash(docs[0])
-	printf("[%v] CRC32: 0x%08x\n", "🎉" if crc32 == 0xcaa042b9 else "🤬", crc32)
+	crc32 := doc_hash(docs[0], false)
+	printf("[%v] CRC32: 0x%08x\n", "🎉" if crc32 == 0x420dbac5 else "🤬", crc32)
 
 	for round in 0..<N {
 		defer xml.destroy(docs[round])

+ 17 - 12
core/encoding/xml/helpers.odin

@@ -13,20 +13,25 @@ find_child_by_ident :: proc(doc: ^Document, parent_id: Element_ID, ident: string
 	tag := doc.elements[parent_id]
 
 	count := 0
-	for child_id in tag.children {
-		child := doc.elements[child_id]
-		/*
-			Skip commments. They have no name.
-		*/
-		if child.kind  != .Element                { continue }
+	for v in tag.value {
+		switch child_id in v {
+		case string: continue
+		case Element_ID:
+			child := doc.elements[child_id]
+			/*
+				Skip commments. They have no name.
+			*/
+			if child.kind  != .Element                { continue }
 
-		/*
-			If the ident matches and it's the nth such child, return it.
-		*/
-		if child.ident == ident {
-			if count == nth                       { return child_id, true }
-			count += 1
+			/*
+				If the ident matches and it's the nth such child, return it.
+			*/
+			if child.ident == ident {
+				if count == nth                       { return child_id, true }
+				count += 1
+			}
 		}
+
 	}
 	return 0, false
 }

+ 16 - 16
core/encoding/xml/tokenizer.odin

@@ -125,38 +125,38 @@ error :: proc(t: ^Tokenizer, offset: int, msg: string, args: ..any) {
 }
 
 @(optimization_mode="speed")
-advance_rune :: proc(using t: ^Tokenizer) {
+advance_rune :: proc(t: ^Tokenizer) {
 	#no_bounds_check {
 		/*
 			Already bounds-checked here.
 		*/
-		if read_offset < len(src) {
-			offset = read_offset
-			if ch == '\n' {
-				line_offset = offset
-				line_count += 1
+		if t.read_offset < len(t.src) {
+			t.offset = t.read_offset
+			if t.ch == '\n' {
+				t.line_offset = t.offset
+				t.line_count += 1
 			}
-			r, w := rune(src[read_offset]), 1
+			r, w := rune(t.src[t.read_offset]), 1
 			switch {
 			case r == 0:
 				error(t, t.offset, "illegal character NUL")
 			case r >= utf8.RUNE_SELF:
-				r, w = #force_inline utf8.decode_rune_in_string(src[read_offset:])
+				r, w = #force_inline utf8.decode_rune_in_string(t.src[t.read_offset:])
 				if r == utf8.RUNE_ERROR && w == 1 {
 					error(t, t.offset, "illegal UTF-8 encoding")
-				} else if r == utf8.RUNE_BOM && offset > 0 {
+				} else if r == utf8.RUNE_BOM && t.offset > 0 {
 					error(t, t.offset, "illegal byte order mark")
 				}
 			}
-			read_offset += w
-			ch = r
+			t.read_offset += w
+			t.ch = r
 		} else {
-			offset = len(src)
-			if ch == '\n' {
-				line_offset = offset
-				line_count += 1
+			t.offset = len(t.src)
+			if t.ch == '\n' {
+				t.line_offset = t.offset
+				t.line_count += 1
 			}
-			ch = -1
+			t.ch = -1
 		}
 	}
 }

+ 15 - 27
core/encoding/xml/xml_reader.odin

@@ -125,16 +125,19 @@ Document :: struct {
 
 Element :: struct {
 	ident:   string,
-	value:   string,
+	value:   [dynamic]Value,
 	attribs: Attributes,
 
 	kind: enum {
 		Element = 0,
 		Comment,
 	},
-
 	parent:   Element_ID,
-	children: [dynamic]Element_ID,
+}
+
+Value :: union {
+	string,
+	Element_ID,
 }
 
 Attribute :: struct {
@@ -247,9 +250,6 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 
 	err =            .Unexpected_Token
 	element, parent: Element_ID
-
-	tag_is_open   := false
-	first_element := true
 	open: Token
 
 	/*
@@ -275,16 +275,10 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 					e.g. <odin - Start of new element.
 				*/
 				element = new_element(doc)
-				tag_is_open = true
-
-				if first_element {
-					/*
-						First element.
-					*/
-					parent   = element
-					first_element = false
+				if element == 0 { // First Element
+					parent = element
 				} else {
-					append(&doc.elements[parent].children, element)
+					append(&doc.elements[parent].value, element)
 				}
 
 				doc.elements[element].parent = parent
@@ -324,7 +318,6 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 					expect(t, .Gt) or_return
 					parent      = doc.elements[element].parent
 					element     = parent
-					tag_is_open = false
 
 				case:
 					error(t, t.offset, "Expected close tag, got: %#v\n", end_token)
@@ -344,7 +337,6 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 				}
 				parent      = doc.elements[element].parent
 				element     = parent
-				tag_is_open = false
 
 			} else if open.kind == .Exclaim {
 				/*
@@ -392,8 +384,8 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 							el := new_element(doc)
 							doc.elements[el].parent = element
 							doc.elements[el].kind   = .Comment
-							doc.elements[el].value  = comment
-							append(&doc.elements[element].children, el)
+							append(&doc.elements[el].value, comment)
+							append(&doc.elements[element].value, el)
 						}
 					}
 
@@ -436,9 +428,6 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 			/*
 				End of file.
 			*/
-			if tag_is_open {
-				return doc, .Premature_EOF
-			}
 			break loop
 
 		case:
@@ -450,7 +439,7 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 			needs_processing |= .Decode_SGML_Entities in opts.flags
 
 			if !needs_processing {
-				doc.elements[element].value = body_text
+				append(&doc.elements[element].value, body_text)
 				continue
 			}
 
@@ -472,10 +461,10 @@ parse_bytes :: proc(data: []u8, options := DEFAULT_OPTIONS, path := "", error_ha
 
 			decoded, decode_err := entity.decode_xml(body_text, decode_opts)
 			if decode_err == .None {
-				doc.elements[element].value = decoded
+				append(&doc.elements[element].value, decoded)
 				append(&doc.strings_to_free, decoded)
 			} else {
-				doc.elements[element].value = body_text
+				append(&doc.elements[element].value, body_text)
 			}
 		}
 	}
@@ -518,7 +507,7 @@ destroy :: proc(doc: ^Document) {
 
 	for el in doc.elements {
 		delete(el.attribs)
-		delete(el.children)
+		delete(el.value)
 	}
 	delete(doc.elements)
 
@@ -710,6 +699,5 @@ new_element :: proc(doc: ^Document) -> (id: Element_ID) {
 
 	cur := doc.element_count
 	doc.element_count += 1
-
 	return cur
 }

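The hunks above fold Element.children and the old value: string field into a single value: [dynamic]Value array that interleaves text fragments and child element IDs in document order. A minimal sketch of consuming it, mirroring find_child_by_ident above (the procedure itself is hypothetical; type and field names come from the diff and assume the xml package context):

	// Total bytes of character data under one element, direct and nested.
	text_length :: proc(doc: ^Document, id: Element_ID) -> (n: int) {
		element := doc.elements[id]
		for content in element.value {
			switch v in content {
			case string:
				n += len(v)              // a run of character data in this element
			case Element_ID:
				n += text_length(doc, v) // recurse into a child element
			}
		}
		return
	}
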
+ 11 - 11
core/fmt/fmt.odin

@@ -835,22 +835,22 @@ int_from_arg :: proc(args: []any, arg_index: int) -> (int, int, bool) {
 // - fi: A pointer to an Info structure
 // - fi: A pointer to an Info structure
 // - verb: The invalid format verb
 // - verb: The invalid format verb
 //
 //
-fmt_bad_verb :: proc(using fi: ^Info, verb: rune) {
+fmt_bad_verb :: proc(fi: ^Info, verb: rune) {
 	prev_in_bad := fi.in_bad
 	prev_in_bad := fi.in_bad
 	defer fi.in_bad = prev_in_bad
 	defer fi.in_bad = prev_in_bad
 	fi.in_bad = true
 	fi.in_bad = true
 
 
-	io.write_string(writer, "%!", &fi.n)
-	io.write_rune(writer, verb, &fi.n)
-	io.write_byte(writer, '(', &fi.n)
-	if arg.id != nil {
-		reflect.write_typeid(writer, arg.id, &fi.n)
-		io.write_byte(writer, '=', &fi.n)
-		fmt_value(fi, arg, 'v')
+	io.write_string(fi.writer, "%!", &fi.n)
+	io.write_rune(fi.writer, verb, &fi.n)
+	io.write_byte(fi.writer, '(', &fi.n)
+	if fi.arg.id != nil {
+		reflect.write_typeid(fi.writer, fi.arg.id, &fi.n)
+		io.write_byte(fi.writer, '=', &fi.n)
+		fmt_value(fi, fi.arg, 'v')
 	} else {
 	} else {
-		io.write_string(writer, "<nil>", &fi.n)
+		io.write_string(fi.writer, "<nil>", &fi.n)
 	}
 	}
-	io.write_byte(writer, ')', &fi.n)
+	io.write_byte(fi.writer, ')', &fi.n)
 }
 }
 // Formats a boolean value according to the specified format verb
 // Formats a boolean value according to the specified format verb
 //
 //
@@ -859,7 +859,7 @@ fmt_bad_verb :: proc(using fi: ^Info, verb: rune) {
 // - b: The boolean value to format
 // - b: The boolean value to format
 // - verb: The format verb
 // - verb: The format verb
 //
 //
-fmt_bool :: proc(using fi: ^Info, b: bool, verb: rune) {
+fmt_bool :: proc(fi: ^Info, b: bool, verb: rune) {
 	switch verb {
 	switch verb {
 	case 't', 'v':
 	case 't', 'v':
 		fmt_string(fi, b ? "true" : "false", 's')
 		fmt_string(fi, b ? "true" : "false", 's')

+ 8 - 7
core/image/netpbm/helpers.odin

@@ -4,13 +4,14 @@ import "core:bytes"
 import "core:image"
 import "core:image"
 
 
 destroy :: proc(img: ^image.Image) -> bool {
 destroy :: proc(img: ^image.Image) -> bool {
-	if img == nil do return false
+	if img == nil {
+		return false
+	}
 
 
 	defer free(img)
 	defer free(img)
 	bytes.buffer_destroy(&img.pixels)
 	bytes.buffer_destroy(&img.pixels)
 
 
-	info, ok := img.metadata.(^image.Netpbm_Info)
-	if !ok do return false
+	info := img.metadata.(^image.Netpbm_Info) or_return
 
 
 	header_destroy(&info.header)
 	header_destroy(&info.header)
 	free(info)
 	free(info)
@@ -19,9 +20,9 @@ destroy :: proc(img: ^image.Image) -> bool {
 	return true
 	return true
 }
 }
 
 
-header_destroy :: proc(using header: ^Header) {
-	if format == .P7 && tupltype != "" {
-		delete(tupltype)
-		tupltype = ""
+header_destroy :: proc(header: ^Header) {
+	if header.format == .P7 && header.tupltype != "" {
+		delete(header.tupltype)
+		header.tupltype = ""
 	}
 	}
 }
 }

+ 1 - 0
core/image/netpbm/netpbm.odin

@@ -1,3 +1,4 @@
+//+vet !using-stmt
 package netpbm
 package netpbm
 
 
 import "core:bytes"
 import "core:bytes"

+ 3 - 4
core/image/png/helpers.odin

@@ -80,11 +80,10 @@ time :: proc(c: image.PNG_Chunk) -> (res: tIME, ok: bool) {
 }
 }
 
 
 core_time :: proc(c: image.PNG_Chunk) -> (t: coretime.Time, ok: bool) {
 core_time :: proc(c: image.PNG_Chunk) -> (t: coretime.Time, ok: bool) {
-	if png_time, png_ok := time(c); png_ok {
-		using png_time
+	if t, png_ok := time(c); png_ok {
 		return coretime.datetime_to_time(
 		return coretime.datetime_to_time(
-			int(year), int(month), int(day),
-			int(hour), int(minute), int(second),
+			int(t.year), int(t.month),  int(t.day),
+			int(t.hour), int(t.minute), int(t.second),
 		)
 		)
 	} else {
 	} else {
 		return {}, false
 		return {}, false

+ 8 - 8
core/image/png/png.odin

@@ -11,6 +11,7 @@
 // package png implements a PNG image reader
 // package png implements a PNG image reader
 //
 //
 // The PNG specification is at https://www.w3.org/TR/PNG/.
 // The PNG specification is at https://www.w3.org/TR/PNG/.
+//+vet !using-stmt
 package png
 package png
 
 
 import "core:compress"
 import "core:compress"
@@ -444,15 +445,14 @@ load_from_context :: proc(ctx: ^$C, options := Options{}, allocator := context.a
 			img.width  = int(header.width)
 			img.width  = int(header.width)
 			img.height = int(header.height)
 			img.height = int(header.height)
 
 
-			using header
 			h := image.PNG_IHDR{
 			h := image.PNG_IHDR{
-				width              = width,
-				height             = height,
-				bit_depth          = bit_depth,
-				color_type         = color_type,
-				compression_method = compression_method,
-				filter_method      = filter_method,
-				interlace_method   = interlace_method,
+				width              = header.width,
+				height             = header.height,
+				bit_depth          = header.bit_depth,
+				color_type         = header.color_type,
+				compression_method = header.compression_method,
+				filter_method      = header.filter_method,
+				interlace_method   = header.interlace_method,
 			}
 			}
 			info.header = h
 			info.header = h
 
 

+ 12 - 12
core/math/math.odin

@@ -2286,20 +2286,20 @@ F64_MASK  :: 0x7ff
 F64_SHIFT :: 64 - 12
 F64_SHIFT :: 64 - 12
 F64_BIAS  :: 0x3ff
 F64_BIAS  :: 0x3ff
 
 
-INF_F16     :f16: 0h7C00
-NEG_INF_F16 :f16: 0hFC00
+INF_F16     :: f16(0h7C00)
+NEG_INF_F16 :: f16(0hFC00)
 
 
-SNAN_F16    :f16: 0h7C01
-QNAN_F16    :f16: 0h7E01
+SNAN_F16    :: f16(0h7C01)
+QNAN_F16    :: f16(0h7E01)
 
 
-INF_F32     :f32: 0h7F80_0000
-NEG_INF_F32 :f32: 0hFF80_0000
+INF_F32     :: f32(0h7F80_0000)
+NEG_INF_F32 :: f32(0hFF80_0000)
 
 
-SNAN_F32    :f32: 0hFF80_0001
-QNAN_F32    :f32: 0hFFC0_0001
+SNAN_F32    :: f32(0hFF80_0001)
+QNAN_F32    :: f32(0hFFC0_0001)
 
 
-INF_F64     :f64: 0h7FF0_0000_0000_0000
-NEG_INF_F64 :f64: 0hFFF0_0000_0000_0000
+INF_F64     :: f64(0h7FF0_0000_0000_0000)
+NEG_INF_F64 :: f64(0hFFF0_0000_0000_0000)
 
 
-SNAN_F64    :f64: 0h7FF0_0000_0000_0001
-QNAN_F64    :f64: 0h7FF8_0000_0000_0001
+SNAN_F64    :: f64(0h7FF0_0000_0000_0001)
+QNAN_F64    :: f64(0h7FF8_0000_0000_0001)

+ 52 - 52
core/mem/allocators.odin

@@ -111,11 +111,11 @@ begin_arena_temp_memory :: proc(a: ^Arena) -> Arena_Temp_Memory {
 	return tmp
 	return tmp
 }
 }
 
 
-end_arena_temp_memory :: proc(using tmp: Arena_Temp_Memory) {
-	assert(arena.offset >= prev_offset)
-	assert(arena.temp_count > 0)
-	arena.offset = prev_offset
-	arena.temp_count -= 1
+end_arena_temp_memory :: proc(tmp: Arena_Temp_Memory) {
+	assert(tmp.arena.offset >= tmp.prev_offset)
+	assert(tmp.arena.temp_count > 0)
+	tmp.arena.offset = tmp.prev_offset
+	tmp.arena.temp_count -= 1
 }
 }
 
 
 
 
@@ -702,11 +702,11 @@ dynamic_pool_init :: proc(pool: ^Dynamic_Pool,
 	pool.         used_blocks.allocator = array_allocator
 	pool.         used_blocks.allocator = array_allocator
 }
 }
 
 
-dynamic_pool_destroy :: proc(using pool: ^Dynamic_Pool) {
+dynamic_pool_destroy :: proc(pool: ^Dynamic_Pool) {
 	dynamic_pool_free_all(pool)
 	dynamic_pool_free_all(pool)
-	delete(unused_blocks)
-	delete(used_blocks)
-	delete(out_band_allocations)
+	delete(pool.unused_blocks)
+	delete(pool.used_blocks)
+	delete(pool.out_band_allocations)
 
 
 	zero(pool, size_of(pool^))
 	zero(pool, size_of(pool^))
 }
 }
@@ -719,90 +719,90 @@ dynamic_pool_alloc :: proc(pool: ^Dynamic_Pool, bytes: int) -> (rawptr, Allocato
 }
 }
 
 
 @(require_results)
 @(require_results)
-dynamic_pool_alloc_bytes :: proc(using pool: ^Dynamic_Pool, bytes: int) -> ([]byte, Allocator_Error) {
-	cycle_new_block :: proc(using pool: ^Dynamic_Pool) -> (err: Allocator_Error) {
-		if block_allocator.procedure == nil {
+dynamic_pool_alloc_bytes :: proc(p: ^Dynamic_Pool, bytes: int) -> ([]byte, Allocator_Error) {
+	cycle_new_block :: proc(p: ^Dynamic_Pool) -> (err: Allocator_Error) {
+		if p.block_allocator.procedure == nil {
 			panic("You must call pool_init on a Pool before using it")
 			panic("You must call pool_init on a Pool before using it")
 		}
 		}
 
 
-		if current_block != nil {
-			append(&used_blocks, current_block)
+		if p.current_block != nil {
+			append(&p.used_blocks, p.current_block)
 		}
 		}
 
 
 		new_block: rawptr
 		new_block: rawptr
-		if len(unused_blocks) > 0 {
-			new_block = pop(&unused_blocks)
+		if len(p.unused_blocks) > 0 {
+			new_block = pop(&p.unused_blocks)
 		} else {
 		} else {
 			data: []byte
 			data: []byte
-			data, err = block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
-			                                           block_size, alignment,
-			                                           nil, 0)
+			data, err = p.block_allocator.procedure(p.block_allocator.data, Allocator_Mode.Alloc,
+			                                        p.block_size, p.alignment,
+			                                        nil, 0)
 			new_block = raw_data(data)
 			new_block = raw_data(data)
 		}
 		}
 
 
-		bytes_left = block_size
-		current_pos = new_block
-		current_block = new_block
+		p.bytes_left    = p.block_size
+		p.current_pos   = new_block
+		p.current_block = new_block
 		return
 		return
 	}
 	}
 
 
 	n := bytes
 	n := bytes
-	extra := alignment - (n % alignment)
+	extra := p.alignment - (n % p.alignment)
 	n += extra
 	n += extra
-	if n >= out_band_size {
-		assert(block_allocator.procedure != nil)
-		memory, err := block_allocator.procedure(block_allocator.data, Allocator_Mode.Alloc,
-			                                block_size, alignment,
-			                                nil, 0)
+	if n >= p.out_band_size {
+		assert(p.block_allocator.procedure != nil)
+		memory, err := p.block_allocator.procedure(p.block_allocator.data, Allocator_Mode.Alloc,
+		                                           p.block_size, p.alignment,
+		                                           nil, 0)
 		if memory != nil {
 		if memory != nil {
-			append(&out_band_allocations, raw_data(memory))
+			append(&p.out_band_allocations, raw_data(memory))
 		}
 		}
 		return memory, err
 		return memory, err
 	}
 	}
 
 
-	if bytes_left < n {
-		err := cycle_new_block(pool)
+	if p.bytes_left < n {
+		err := cycle_new_block(p)
 		if err != nil {
 		if err != nil {
 			return nil, err
 			return nil, err
 		}
 		}
-		if current_block == nil {
+		if p.current_block == nil {
 			return nil, .Out_Of_Memory
 			return nil, .Out_Of_Memory
 		}
 		}
 	}
 	}
 
 
-	memory := current_pos
-	current_pos = ptr_offset((^byte)(current_pos), n)
-	bytes_left -= n
-	return byte_slice(memory, bytes), nil
+	memory := p.current_pos
+	p.current_pos = ([^]byte)(p.current_pos)[n:]
+	p.bytes_left -= n
+	return ([^]byte)(memory)[:bytes], nil
 }
 }
 
 
 
 
-dynamic_pool_reset :: proc(using pool: ^Dynamic_Pool) {
-	if current_block != nil {
-		append(&unused_blocks, current_block)
-		current_block = nil
+dynamic_pool_reset :: proc(p: ^Dynamic_Pool) {
+	if p.current_block != nil {
+		append(&p.unused_blocks, p.current_block)
+		p.current_block = nil
 	}
 	}
 
 
-	for block in used_blocks {
-		append(&unused_blocks, block)
+	for block in p.used_blocks {
+		append(&p.unused_blocks, block)
 	}
 	}
-	clear(&used_blocks)
+	clear(&p.used_blocks)
 
 
-	for a in out_band_allocations {
-		free(a, block_allocator)
+	for a in p.out_band_allocations {
+		free(a, p.block_allocator)
 	}
 	}
-	clear(&out_band_allocations)
+	clear(&p.out_band_allocations)
 
 
-	bytes_left = 0 // Make new allocations call `cycle_new_block` again.
+	p.bytes_left = 0 // Make new allocations call `cycle_new_block` again.
 }
 }
 
 
-dynamic_pool_free_all :: proc(using pool: ^Dynamic_Pool) {
-	dynamic_pool_reset(pool)
+dynamic_pool_free_all :: proc(p: ^Dynamic_Pool) {
+	dynamic_pool_reset(p)
 
 
-	for block in unused_blocks {
-		free(block, block_allocator)
+	for block in p.unused_blocks {
+		free(block, p.block_allocator)
 	}
 	}
-	clear(&unused_blocks)
+	clear(&p.unused_blocks)
 }
 }
 
 
 
 

+ 35 - 35
core/net/url.odin

@@ -63,100 +63,100 @@ split_url :: proc(url: string, allocator := context.allocator) -> (scheme, host,
 }
 }
 
 
 join_url :: proc(scheme, host, path: string, queries: map[string]string, allocator := context.allocator) -> string {
 join_url :: proc(scheme, host, path: string, queries: map[string]string, allocator := context.allocator) -> string {
-	using strings
+	b := strings.builder_make(allocator)
+	strings.builder_grow(&b, len(scheme) + 3 + len(host) + 1 + len(path))
 
 
-	b := builder_make(allocator)
-	builder_grow(&b, len(scheme) + 3 + len(host) + 1 + len(path))
-
-	write_string(&b, scheme)
-	write_string(&b, "://")
-	write_string(&b, trim_space(host))
+	strings.write_string(&b, scheme)
+	strings.write_string(&b, "://")
+	strings.write_string(&b, strings.trim_space(host))
 
 
 	if path != "" {
 	if path != "" {
-		if path[0] != '/' do write_string(&b, "/")
-		write_string(&b, trim_space(path))
+		if path[0] != '/' {
+			strings.write_string(&b, "/")
+		}
+		strings.write_string(&b, strings.trim_space(path))
 	}
 	}
 
 
 
 
 	query_length := len(queries)
 	query_length := len(queries)
-	if query_length > 0 do write_string(&b, "?")
+	if query_length > 0 {
+		strings.write_string(&b, "?")
+	}
 	i := 0
 	i := 0
 	for query_name, query_value in queries {
 	for query_name, query_value in queries {
-		write_string(&b, query_name)
+		strings.write_string(&b, query_name)
 		if query_value != "" {
 		if query_value != "" {
-			write_string(&b, "=")
-			write_string(&b, query_value)
+			strings.write_string(&b, "=")
+			strings.write_string(&b, query_value)
 		}
 		}
 		if i < query_length - 1 {
 		if i < query_length - 1 {
-			write_string(&b, "&")
+			strings.write_string(&b, "&")
 		}
 		}
 		i += 1
 		i += 1
 	}
 	}
 
 
-	return to_string(b)
+	return strings.to_string(b)
 }
 }
 
 
 percent_encode :: proc(s: string, allocator := context.allocator) -> string {
 percent_encode :: proc(s: string, allocator := context.allocator) -> string {
-	using strings
-
-	b := builder_make(allocator)
-	builder_grow(&b, len(s) + 16) // NOTE(tetra): A reasonable number to allow for the number of things we need to escape.
+	b := strings.builder_make(allocator)
+	strings.builder_grow(&b, len(s) + 16) // NOTE(tetra): A reasonable number to allow for the number of things we need to escape.
 
 
 	for ch in s {
 	for ch in s {
 		switch ch {
 		switch ch {
 		case 'A'..='Z', 'a'..='z', '0'..='9', '-', '_', '.', '~':
 		case 'A'..='Z', 'a'..='z', '0'..='9', '-', '_', '.', '~':
-			write_rune(&b, ch)
+			strings.write_rune(&b, ch)
 		case:
 		case:
 			bytes, n := utf8.encode_rune(ch)
 			bytes, n := utf8.encode_rune(ch)
 			for byte in bytes[:n] {
 			for byte in bytes[:n] {
 				buf: [2]u8 = ---
 				buf: [2]u8 = ---
 				t := strconv.append_int(buf[:], i64(byte), 16)
 				t := strconv.append_int(buf[:], i64(byte), 16)
-				write_rune(&b, '%')
-				write_string(&b, t)
+				strings.write_rune(&b, '%')
+				strings.write_string(&b, t)
 			}
 			}
 		}
 		}
 	}
 	}
 
 
-	return to_string(b)
+	return strings.to_string(b)
 }
 }
 
 
 percent_decode :: proc(encoded_string: string, allocator := context.allocator) -> (decoded_string: string, ok: bool) {
 percent_decode :: proc(encoded_string: string, allocator := context.allocator) -> (decoded_string: string, ok: bool) {
-	using strings
-
-	b := builder_make(allocator)
-	builder_grow(&b, len(encoded_string))
-	defer if !ok do builder_destroy(&b)
+	b := strings.builder_make(allocator)
+	strings.builder_grow(&b, len(encoded_string))
+	defer if !ok do strings.builder_destroy(&b)
 
 
 	s := encoded_string
 	s := encoded_string
 
 
 	for len(s) > 0 {
 	for len(s) > 0 {
-		i := index_byte(s, '%')
+		i := strings.index_byte(s, '%')
 		if i == -1 {
 		if i == -1 {
-			write_string(&b, s) // no '%'s; the string is already decoded
+			strings.write_string(&b, s) // no '%'s; the string is already decoded
 			break
 			break
 		}
 		}
 
 
-		write_string(&b, s[:i])
+		strings.write_string(&b, s[:i])
 		s = s[i:]
 		s = s[i:]
 
 
 		if len(s) == 0 do return // percent without anything after it
 		if len(s) == 0 do return // percent without anything after it
 		s = s[1:]
 		s = s[1:]
 
 
 		if s[0] == '%' {
 		if s[0] == '%' {
-			write_byte(&b, '%')
+			strings.write_byte(&b, '%')
 			s = s[1:]
 			s = s[1:]
 			continue
 			continue
 		}
 		}
 
 
-		if len(s) < 2 do return // percent without encoded value
+		if len(s) < 2 {
+			return // percent without encoded value
+		}
 
 
 		val := hex.decode_sequence(s[:2]) or_return
 		val := hex.decode_sequence(s[:2]) or_return
-		write_byte(&b, val)
+		strings.write_byte(&b, val)
 		s = s[2:]
 		s = s[2:]
 	}
 	}
 
 
 	ok = true
 	ok = true
-	decoded_string = to_string(b)
+	decoded_string = strings.to_string(b)
 	return
 	return
 }
 }
 
 

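A hypothetical call against the join_url signature shown above (scheme, host, path, and query values are made up; assumes import "core:net"):

	queries := map[string]string{"q" = "odin"}
	defer delete(queries)

	url := net.join_url("https", "example.com", "/search", queries)
	defer delete(url)
	// Given the writes above, url is "https://example.com/search?q=odin",
	// allocated with the (default: context) allocator passed to join_url.
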
+ 86 - 92
core/odin/printer/visit.odin

@@ -336,22 +336,20 @@ hint_current_line :: proc(p: ^Printer, hint: Line_Type) {
 
 
 @(private)
 @(private)
 visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
 visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
-	using ast
-
 	if decl == nil {
 	if decl == nil {
 		return
 		return
 	}
 	}
 
 
 	#partial switch v in decl.derived_stmt {
 	#partial switch v in decl.derived_stmt {
-	case ^Expr_Stmt:
+	case ^ast.Expr_Stmt:
 		move_line(p, decl.pos)
 		move_line(p, decl.pos)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		if p.config.semicolons {
 		if p.config.semicolons {
 			push_generic_token(p, .Semicolon, 0)
 			push_generic_token(p, .Semicolon, 0)
 		}
 		}
-	case ^When_Stmt:
-		visit_stmt(p, cast(^Stmt)decl)
-	case ^Foreign_Import_Decl:
+	case ^ast.When_Stmt:
+		visit_stmt(p, cast(^ast.Stmt)decl)
+	case ^ast.Foreign_Import_Decl:
 		if len(v.attributes) > 0 {
 		if len(v.attributes) > 0 {
 			sort.sort(sort_attribute(&v.attributes))
 			sort.sort(sort_attribute(&v.attributes))
 			move_line(p, v.attributes[0].pos)
 			move_line(p, v.attributes[0].pos)
@@ -370,7 +368,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
 		for path in v.fullpaths {
 		for path in v.fullpaths {
 			push_ident_token(p, path, 0)
 			push_ident_token(p, path, 0)
 		}
 		}
-	case ^Foreign_Block_Decl:
+	case ^ast.Foreign_Block_Decl:
 		if len(v.attributes) > 0 {
 		if len(v.attributes) > 0 {
 			sort.sort(sort_attribute(&v.attributes))
 			sort.sort(sort_attribute(&v.attributes))
 			move_line(p, v.attributes[0].pos)
 			move_line(p, v.attributes[0].pos)
@@ -383,7 +381,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
 
 
 		visit_expr(p, v.foreign_library)
 		visit_expr(p, v.foreign_library)
 		visit_stmt(p, v.body)
 		visit_stmt(p, v.body)
-	case ^Import_Decl:
+	case ^ast.Import_Decl:
 		move_line(p, decl.pos)
 		move_line(p, decl.pos)
 
 
 		if v.name.text != "" {
 		if v.name.text != "" {
@@ -395,7 +393,7 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
 			push_ident_token(p, v.fullpath, 1)
 			push_ident_token(p, v.fullpath, 1)
 		}
 		}
 
 
-	case ^Value_Decl:
+	case ^ast.Value_Decl:
 		if len(v.attributes) > 0 {
 		if len(v.attributes) > 0 {
 			sort.sort(sort_attribute(&v.attributes))
 			sort.sort(sort_attribute(&v.attributes))
 			move_line(p, v.attributes[0].pos)
 			move_line(p, v.attributes[0].pos)
@@ -447,9 +445,9 @@ visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
 
 
 		for value in v.values {
 		for value in v.values {
 			#partial switch a in value.derived {
 			#partial switch a in value.derived {
-			case ^Union_Type, ^Enum_Type, ^Struct_Type:
+			case ^ast.Union_Type, ^ast.Enum_Type, ^ast.Struct_Type:
 				add_semicolon = false || called_in_stmt
 				add_semicolon = false || called_in_stmt
-			case ^Proc_Lit:
+			case ^ast.Proc_Lit:
 				add_semicolon = false
 				add_semicolon = false
 			}
 			}
 		}
 		}
@@ -510,40 +508,38 @@ visit_attributes :: proc(p: ^Printer, attributes: [dynamic]^ast.Attribute) {
 
 
 @(private)
 @(private)
 visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Generic, empty_block := false, block_stmt := false) {
 visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Generic, empty_block := false, block_stmt := false) {
-	using ast
-
 	if stmt == nil {
 	if stmt == nil {
 		return
 		return
 	}
 	}
 
 
 
 
 	switch v in stmt.derived_stmt {
 	switch v in stmt.derived_stmt {
-	case ^Bad_Stmt:
-	case ^Bad_Decl:
-	case ^Package_Decl:
+	case ^ast.Bad_Stmt:
+	case ^ast.Bad_Decl:
+	case ^ast.Package_Decl:
 
 
-	case ^Empty_Stmt:
+	case ^ast.Empty_Stmt:
 		push_generic_token(p, .Semicolon, 0)
 		push_generic_token(p, .Semicolon, 0)
-	case ^Tag_Stmt:
+	case ^ast.Tag_Stmt:
 		push_generic_token(p, .Hash, 1)
 		push_generic_token(p, .Hash, 1)
 		push_generic_token(p, v.op.kind, 1, v.op.text)
 		push_generic_token(p, v.op.kind, 1, v.op.text)
 		visit_stmt(p, v.stmt)
 		visit_stmt(p, v.stmt)
 
 
 
 
-	case ^Import_Decl:
-		visit_decl(p, cast(^Decl)stmt, true)
+	case ^ast.Import_Decl:
+		visit_decl(p, cast(^ast.Decl)stmt, true)
 		return
 		return
-	case ^Value_Decl:
-		visit_decl(p, cast(^Decl)stmt, true)
+	case ^ast.Value_Decl:
+		visit_decl(p, cast(^ast.Decl)stmt, true)
 		return
 		return
-	case ^Foreign_Import_Decl:
-		visit_decl(p, cast(^Decl)stmt, true)
+	case ^ast.Foreign_Import_Decl:
+		visit_decl(p, cast(^ast.Decl)stmt, true)
 		return
 		return
-	case ^Foreign_Block_Decl:
-		visit_decl(p, cast(^Decl)stmt, true)
+	case ^ast.Foreign_Block_Decl:
+		visit_decl(p, cast(^ast.Decl)stmt, true)
 		return
 		return
 
 
-	case ^Using_Stmt:
+	case ^ast.Using_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		push_generic_token(p, .Using, 1)
 		push_generic_token(p, .Using, 1)
@@ -553,7 +549,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		if p.config.semicolons {
 		if p.config.semicolons {
 			push_generic_token(p, .Semicolon, 0)
 			push_generic_token(p, .Semicolon, 0)
 		}
 		}
-	case ^Block_Stmt:
+	case ^ast.Block_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		if v.pos.line == v.end.line {
 		if v.pos.line == v.end.line {
@@ -583,7 +579,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 				visit_end_brace(p, v.end)
 				visit_end_brace(p, v.end)
 			}
 			}
 		}
 		}
-	case ^If_Stmt:
+	case ^ast.If_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		if v.label != nil {
 		if v.label != nil {
@@ -606,7 +602,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 
 
 		uses_do := false
 		uses_do := false
 
 
-		if check_stmt, ok := v.body.derived.(^Block_Stmt); ok && check_stmt.uses_do {
+		if check_stmt, ok := v.body.derived.(^ast.Block_Stmt); ok && check_stmt.uses_do {
 			uses_do = true
 			uses_do = true
 		}
 		}
 
 
@@ -637,7 +633,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 
 
 			visit_stmt(p, v.else_stmt)
 			visit_stmt(p, v.else_stmt)
 		}
 		}
-	case ^Switch_Stmt:
+	case ^ast.Switch_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		if v.label != nil {
 		if v.label != nil {
@@ -665,7 +661,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 
 
 		visit_expr(p, v.cond)
 		visit_expr(p, v.cond)
 		visit_stmt(p, v.body)
 		visit_stmt(p, v.body)
-	case ^Case_Clause:
+	case ^ast.Case_Clause:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		if !p.config.indent_cases {
 		if !p.config.indent_cases {
@@ -689,7 +685,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		if !p.config.indent_cases {
 		if !p.config.indent_cases {
 			indent(p)
 			indent(p)
 		}
 		}
-	case ^Type_Switch_Stmt:
+	case ^ast.Type_Switch_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		hint_current_line(p, {.Switch_Stmt})
 		hint_current_line(p, {.Switch_Stmt})
@@ -707,7 +703,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 
 
 		visit_stmt(p, v.tag)
 		visit_stmt(p, v.tag)
 		visit_stmt(p, v.body)
 		visit_stmt(p, v.body)
-	case ^Assign_Stmt:
+	case ^ast.Assign_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		hint_current_line(p, {.Assign})
 		hint_current_line(p, {.Assign})
@@ -721,13 +717,13 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		if block_stmt && p.config.semicolons {
 		if block_stmt && p.config.semicolons {
 			push_generic_token(p, .Semicolon, 0)
 			push_generic_token(p, .Semicolon, 0)
 		}
 		}
-	case ^Expr_Stmt:
+	case ^ast.Expr_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		if block_stmt && p.config.semicolons {
 		if block_stmt && p.config.semicolons {
 			push_generic_token(p, .Semicolon, 0)
 			push_generic_token(p, .Semicolon, 0)
 		}
 		}
-	case ^For_Stmt:
+	case ^ast.For_Stmt:
 		// this should be simplified
 		// this should be simplified
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
@@ -764,7 +760,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 
 
 		visit_stmt(p, v.body)
 		visit_stmt(p, v.body)
 
 
-	case ^Inline_Range_Stmt:
+	case ^ast.Inline_Range_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		if v.label != nil {
 		if v.label != nil {
@@ -790,7 +786,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		visit_stmt(p, v.body)
 		visit_stmt(p, v.body)
 
 
-	case ^Range_Stmt:
+	case ^ast.Range_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		if v.label != nil {
 		if v.label != nil {
@@ -816,7 +812,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 
 
 		visit_stmt(p, v.body)
 		visit_stmt(p, v.body)
-	case ^Return_Stmt:
+	case ^ast.Return_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		push_generic_token(p, .Return, 1)
 		push_generic_token(p, .Return, 1)
@@ -828,7 +824,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		if block_stmt && p.config.semicolons {
 		if block_stmt && p.config.semicolons {
 			push_generic_token(p, .Semicolon, 0)
 			push_generic_token(p, .Semicolon, 0)
 		}
 		}
-	case ^Defer_Stmt:
+	case ^ast.Defer_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 		push_generic_token(p, .Defer, 0)
 		push_generic_token(p, .Defer, 0)
 
 
@@ -837,7 +833,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 		if p.config.semicolons {
 		if p.config.semicolons {
 			push_generic_token(p, .Semicolon, 0)
 			push_generic_token(p, .Semicolon, 0)
 		}
 		}
-	case ^When_Stmt:
+	case ^ast.When_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 		push_generic_token(p, .When, 1)
 		push_generic_token(p, .When, 1)
 		visit_expr(p, v.cond)
 		visit_expr(p, v.cond)
@@ -857,7 +853,7 @@ visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Gener
 			visit_stmt(p, v.else_stmt)
 			visit_stmt(p, v.else_stmt)
 		}
 		}
 
 
-	case ^Branch_Stmt:
+	case ^ast.Branch_Stmt:
 		move_line(p, v.pos)
 		move_line(p, v.pos)
 
 
 		push_generic_token(p, v.tok.kind, 0)
 		push_generic_token(p, v.tok.kind, 0)
@@ -921,8 +917,6 @@ push_poly_params :: proc(p: ^Printer, poly_params: ^ast.Field_List) {
 
 
 @(private)
 @(private)
 visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
-	using ast
-
 	if expr == nil {
 	if expr == nil {
 		return
 		return
 	}
 	}
@@ -930,14 +924,14 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 	set_source_position(p, expr.pos)
 	set_source_position(p, expr.pos)
 
 
 	switch v in expr.derived_expr {
 	switch v in expr.derived_expr {
-	case ^Bad_Expr:
+	case ^ast.Bad_Expr:
 
 
-	case ^Tag_Expr:
+	case ^ast.Tag_Expr:
 		push_generic_token(p, .Hash, 1)
 		push_generic_token(p, .Hash, 1)
 		push_generic_token(p, v.op.kind, 1, v.op.text)
 		push_generic_token(p, v.op.kind, 1, v.op.text)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 
 
-	case ^Inline_Asm_Expr:
+	case ^ast.Inline_Asm_Expr:
 		push_generic_token(p, v.tok.kind, 1, v.tok.text)
 		push_generic_token(p, v.tok.kind, 1, v.tok.text)
 
 
 		push_generic_token(p, .Open_Paren, 1)
 		push_generic_token(p, .Open_Paren, 1)
@@ -954,42 +948,42 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 		push_generic_token(p, .Comma, 0)
 		push_generic_token(p, .Comma, 0)
 		visit_expr(p, v.constraints_string)
 		visit_expr(p, v.constraints_string)
 		push_generic_token(p, .Close_Brace, 0)
 		push_generic_token(p, .Close_Brace, 0)
-	case ^Undef:
+	case ^ast.Undef:
 		push_generic_token(p, .Undef, 1)
 		push_generic_token(p, .Undef, 1)
-	case ^Auto_Cast:
+	case ^ast.Auto_Cast:
 		push_generic_token(p, v.op.kind, 1)
 		push_generic_token(p, v.op.kind, 1)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
-	case ^Ternary_If_Expr:
+	case ^ast.Ternary_If_Expr:
 		visit_expr(p, v.x)
 		visit_expr(p, v.x)
 		push_generic_token(p, v.op1.kind, 1)
 		push_generic_token(p, v.op1.kind, 1)
 		visit_expr(p, v.cond)
 		visit_expr(p, v.cond)
 		push_generic_token(p, v.op2.kind, 1)
 		push_generic_token(p, v.op2.kind, 1)
 		visit_expr(p, v.y)
 		visit_expr(p, v.y)
-	case ^Ternary_When_Expr:
+	case ^ast.Ternary_When_Expr:
 		visit_expr(p, v.x)
 		visit_expr(p, v.x)
 		push_generic_token(p, v.op1.kind, 1)
 		push_generic_token(p, v.op1.kind, 1)
 		visit_expr(p, v.cond)
 		visit_expr(p, v.cond)
 		push_generic_token(p, v.op2.kind, 1)
 		push_generic_token(p, v.op2.kind, 1)
 		visit_expr(p, v.y)
 		visit_expr(p, v.y)
-	case ^Or_Else_Expr:
+	case ^ast.Or_Else_Expr:
 		visit_expr(p, v.x)
 		visit_expr(p, v.x)
 		push_generic_token(p, v.token.kind, 1)
 		push_generic_token(p, v.token.kind, 1)
 		visit_expr(p, v.y)
 		visit_expr(p, v.y)
-	case ^Or_Return_Expr:
+	case ^ast.Or_Return_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, v.token.kind, 1)
 		push_generic_token(p, v.token.kind, 1)
-	case ^Selector_Call_Expr:
+	case ^ast.Selector_Call_Expr:
 		visit_expr(p, v.call.expr)
 		visit_expr(p, v.call.expr)
 		push_generic_token(p, .Open_Paren, 1)
 		push_generic_token(p, .Open_Paren, 1)
 		visit_exprs(p, v.call.args, {.Add_Comma})
 		visit_exprs(p, v.call.args, {.Add_Comma})
 		push_generic_token(p, .Close_Paren, 0)
 		push_generic_token(p, .Close_Paren, 0)
-	case ^Ellipsis:
+	case ^ast.Ellipsis:
 		push_generic_token(p, .Ellipsis, 1)
 		push_generic_token(p, .Ellipsis, 1)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
-	case ^Relative_Type:
+	case ^ast.Relative_Type:
 		visit_expr(p, v.tag)
 		visit_expr(p, v.tag)
 		visit_expr(p, v.type)
 		visit_expr(p, v.type)
-	case ^Slice_Expr:
+	case ^ast.Slice_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, .Open_Bracket, 0)
 		push_generic_token(p, .Open_Bracket, 0)
 		visit_expr(p, v.low)
 		visit_expr(p, v.low)
@@ -999,37 +993,37 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 			visit_expr(p, v.high)
 			visit_expr(p, v.high)
 		}
 		}
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
-	case ^Ident:
+	case ^ast.Ident:
 		if .Enforce_Poly_Names in options {
 		if .Enforce_Poly_Names in options {
 			push_generic_token(p, .Dollar, 1)
 			push_generic_token(p, .Dollar, 1)
 			push_ident_token(p, v.name, 0)
 			push_ident_token(p, v.name, 0)
 		} else {
 		} else {
 			push_ident_token(p, v.name, 1)
 			push_ident_token(p, v.name, 1)
 		}
 		}
-	case ^Deref_Expr:
+	case ^ast.Deref_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, v.op.kind, 0)
 		push_generic_token(p, v.op.kind, 0)
-	case ^Type_Cast:
+	case ^ast.Type_Cast:
 		push_generic_token(p, v.tok.kind, 1)
 		push_generic_token(p, v.tok.kind, 1)
 		push_generic_token(p, .Open_Paren, 0)
 		push_generic_token(p, .Open_Paren, 0)
 		visit_expr(p, v.type)
 		visit_expr(p, v.type)
 		push_generic_token(p, .Close_Paren, 0)
 		push_generic_token(p, .Close_Paren, 0)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
-	case ^Basic_Directive:
+	case ^ast.Basic_Directive:
 		push_generic_token(p, v.tok.kind, 1)
 		push_generic_token(p, v.tok.kind, 1)
 		push_ident_token(p, v.name, 0)
 		push_ident_token(p, v.name, 0)
-	case ^Distinct_Type:
+	case ^ast.Distinct_Type:
 		push_generic_token(p, .Distinct, 1)
 		push_generic_token(p, .Distinct, 1)
 		visit_expr(p, v.type)
 		visit_expr(p, v.type)
-	case ^Dynamic_Array_Type:
+	case ^ast.Dynamic_Array_Type:
 		visit_expr(p, v.tag)
 		visit_expr(p, v.tag)
 		push_generic_token(p, .Open_Bracket, 1)
 		push_generic_token(p, .Open_Bracket, 1)
 		push_generic_token(p, .Dynamic, 0)
 		push_generic_token(p, .Dynamic, 0)
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.elem)
 		visit_expr(p, v.elem)
-	case ^Bit_Set_Type:
+	case ^ast.Bit_Set_Type:
 		push_generic_token(p, .Bit_Set, 1)
 		push_generic_token(p, .Bit_Set, 1)
 		push_generic_token(p, .Open_Bracket, 0)
 		push_generic_token(p, .Open_Bracket, 0)
 
 
@@ -1041,7 +1035,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 		}
 		}
 
 
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
-	case ^Union_Type:
+	case ^ast.Union_Type:
 		push_generic_token(p, .Union, 1)
 		push_generic_token(p, .Union, 1)
 
 
 		push_poly_params(p, v.poly_params)
 		push_poly_params(p, v.poly_params)
@@ -1066,7 +1060,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 			visit_exprs(p, v.variants, {.Add_Comma, .Trailing})
 			visit_exprs(p, v.variants, {.Add_Comma, .Trailing})
 			visit_end_brace(p, v.end)
 			visit_end_brace(p, v.end)
 		}
 		}
-	case ^Enum_Type:
+	case ^ast.Enum_Type:
 		push_generic_token(p, .Enum, 1)
 		push_generic_token(p, .Enum, 1)
 
 
 		hint_current_line(p, {.Enum})
 		hint_current_line(p, {.Enum})
@@ -1089,7 +1083,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 		}
 		}
 
 
 		set_source_position(p, v.end)
 		set_source_position(p, v.end)
-	case ^Struct_Type:
+	case ^ast.Struct_Type:
 		push_generic_token(p, .Struct, 1)
 		push_generic_token(p, .Struct, 1)
 
 
 		hint_current_line(p, {.Struct})
 		hint_current_line(p, {.Struct})
@@ -1124,7 +1118,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 		}
 		}
 
 
 		set_source_position(p, v.end)
 		set_source_position(p, v.end)
-	case ^Proc_Lit:
+	case ^ast.Proc_Lit:
 		switch v.inlining {
 		switch v.inlining {
 		case .None:
 		case .None:
 		case .Inline:
 		case .Inline:
@@ -1143,16 +1137,16 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 		} else {
 		} else {
 			push_generic_token(p, .Undef, 1)
 			push_generic_token(p, .Undef, 1)
 		}
 		}
-	case ^Proc_Type:
+	case ^ast.Proc_Type:
 		visit_proc_type(p, v)
 		visit_proc_type(p, v)
-	case ^Basic_Lit:
+	case ^ast.Basic_Lit:
 		push_generic_token(p, v.tok.kind, 1, v.tok.text)
 		push_generic_token(p, v.tok.kind, 1, v.tok.text)
-	case ^Binary_Expr:
+	case ^ast.Binary_Expr:
 		visit_binary_expr(p, v)
 		visit_binary_expr(p, v)
-	case ^Implicit_Selector_Expr:
+	case ^ast.Implicit_Selector_Expr:
 		push_generic_token(p, .Period, 1)
 		push_generic_token(p, .Period, 1)
 		push_ident_token(p, v.field.name, 0)
 		push_ident_token(p, v.field.name, 0)
-	case ^Call_Expr:
+	case ^ast.Call_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 
 
 		push_format_token(p,
 		push_format_token(p,
@@ -1167,34 +1161,34 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 
 
 		visit_call_exprs(p, v.args, v.ellipsis.kind == .Ellipsis)
 		visit_call_exprs(p, v.args, v.ellipsis.kind == .Ellipsis)
 		push_generic_token(p, .Close_Paren, 0)
 		push_generic_token(p, .Close_Paren, 0)
-	case ^Typeid_Type:
+	case ^ast.Typeid_Type:
 		push_generic_token(p, .Typeid, 1)
 		push_generic_token(p, .Typeid, 1)
 
 
 		if v.specialization != nil {
 		if v.specialization != nil {
 			push_generic_token(p, .Quo, 0)
 			push_generic_token(p, .Quo, 0)
 			visit_expr(p, v.specialization)
 			visit_expr(p, v.specialization)
 		}
 		}
-	case ^Selector_Expr:
+	case ^ast.Selector_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, v.op.kind, 0)
 		push_generic_token(p, v.op.kind, 0)
 		visit_expr(p, v.field)
 		visit_expr(p, v.field)
-	case ^Paren_Expr:
+	case ^ast.Paren_Expr:
 		push_generic_token(p, .Open_Paren, 1)
 		push_generic_token(p, .Open_Paren, 1)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, .Close_Paren, 0)
 		push_generic_token(p, .Close_Paren, 0)
-	case ^Index_Expr:
+	case ^ast.Index_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, .Open_Bracket, 0)
 		push_generic_token(p, .Open_Bracket, 0)
 		visit_expr(p, v.index)
 		visit_expr(p, v.index)
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
-	case ^Matrix_Index_Expr:
+	case ^ast.Matrix_Index_Expr:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 		push_generic_token(p, .Open_Bracket, 0)
 		push_generic_token(p, .Open_Bracket, 0)
 		visit_expr(p, v.row_index)
 		visit_expr(p, v.row_index)
 		push_generic_token(p, .Comma, 0)
 		push_generic_token(p, .Comma, 0)
 		visit_expr(p, v.column_index)
 		visit_expr(p, v.column_index)
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
-	case ^Proc_Group:
+	case ^ast.Proc_Group:
 		push_generic_token(p, v.tok.kind, 1)
 		push_generic_token(p, v.tok.kind, 1)
 
 
 		if len(v.args) != 0 && v.pos.line != v.args[len(v.args) - 1].pos.line {
 		if len(v.args) != 0 && v.pos.line != v.args[len(v.args) - 1].pos.line {
@@ -1209,7 +1203,7 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 			push_generic_token(p, .Close_Brace, 0)
 			push_generic_token(p, .Close_Brace, 0)
 		}
 		}
 
 
-	case ^Comp_Lit:
+	case ^ast.Comp_Lit:
 		if v.type != nil {
 		if v.type != nil {
 			visit_expr(p, v.type)
 			visit_expr(p, v.type)
 		}
 		}
@@ -1226,18 +1220,18 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 			push_generic_token(p, .Close_Brace, 0)
 			push_generic_token(p, .Close_Brace, 0)
 		}
 		}
 
 
-	case ^Unary_Expr:
+	case ^ast.Unary_Expr:
 		push_generic_token(p, v.op.kind, 1)
 		push_generic_token(p, v.op.kind, 1)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
-	case ^Field_Value:
+	case ^ast.Field_Value:
 		visit_expr(p, v.field)
 		visit_expr(p, v.field)
 		push_generic_token(p, .Eq, 1)
 		push_generic_token(p, .Eq, 1)
 		visit_expr(p, v.value)
 		visit_expr(p, v.value)
-	case ^Type_Assertion:
+	case ^ast.Type_Assertion:
 		visit_expr(p, v.expr)
 		visit_expr(p, v.expr)
 
 
-		if unary, ok := v.type.derived.(^Unary_Expr); ok && unary.op.text == "?" {
+		if unary, ok := v.type.derived.(^ast.Unary_Expr); ok && unary.op.text == "?" {
 			push_generic_token(p, .Period, 0)
 			push_generic_token(p, .Period, 0)
 			visit_expr(p, v.type)
 			visit_expr(p, v.type)
 		} else {
 		} else {
@@ -1247,13 +1241,13 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 			push_generic_token(p, .Close_Paren, 0)
 			push_generic_token(p, .Close_Paren, 0)
 		}
 		}
 
 
-	case ^Pointer_Type:
+	case ^ast.Pointer_Type:
 		push_generic_token(p, .Pointer, 1)
 		push_generic_token(p, .Pointer, 1)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.elem)
 		visit_expr(p, v.elem)
-	case ^Implicit:
+	case ^ast.Implicit:
 		push_generic_token(p, v.tok.kind, 1)
 		push_generic_token(p, v.tok.kind, 1)
-	case ^Poly_Type:
+	case ^ast.Poly_Type:
 		push_generic_token(p, .Dollar, 1)
 		push_generic_token(p, .Dollar, 1)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.type)
 		visit_expr(p, v.type)
@@ -1263,28 +1257,28 @@ visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
 			merge_next_token(p)
 			merge_next_token(p)
 			visit_expr(p, v.specialization)
 			visit_expr(p, v.specialization)
 		}
 		}
-	case ^Array_Type:
+	case ^ast.Array_Type:
 		visit_expr(p, v.tag)
 		visit_expr(p, v.tag)
 		push_generic_token(p, .Open_Bracket, 1)
 		push_generic_token(p, .Open_Bracket, 1)
 		visit_expr(p, v.len)
 		visit_expr(p, v.len)
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.elem)
 		visit_expr(p, v.elem)
-	case ^Map_Type:
+	case ^ast.Map_Type:
 		push_generic_token(p, .Map, 1)
 		push_generic_token(p, .Map, 1)
 		push_generic_token(p, .Open_Bracket, 0)
 		push_generic_token(p, .Open_Bracket, 0)
 		visit_expr(p, v.key)
 		visit_expr(p, v.key)
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
 		merge_next_token(p)
 		merge_next_token(p)
 		visit_expr(p, v.value)
 		visit_expr(p, v.value)
-	case ^Helper_Type:
+	case ^ast.Helper_Type:
 		visit_expr(p, v.type)
 		visit_expr(p, v.type)
-	case ^Multi_Pointer_Type:
+	case ^ast.Multi_Pointer_Type:
 		push_generic_token(p, .Open_Bracket, 1)
 		push_generic_token(p, .Open_Bracket, 1)
 		push_generic_token(p, .Pointer, 0)
 		push_generic_token(p, .Pointer, 0)
 		push_generic_token(p, .Close_Bracket, 0)
 		push_generic_token(p, .Close_Bracket, 0)
 		visit_expr(p, v.elem)
 		visit_expr(p, v.elem)
-	case ^Matrix_Type:
+	case ^ast.Matrix_Type:
 		push_generic_token(p, .Matrix, 1)
 		push_generic_token(p, .Matrix, 1)
 		push_generic_token(p, .Open_Bracket, 0)
 		push_generic_token(p, .Open_Bracket, 0)
 		visit_expr(p, v.row_count)
 		visit_expr(p, v.row_count)

+ 16 - 16
core/odin/tokenizer/tokenizer.odin

@@ -75,34 +75,34 @@ error :: proc(t: ^Tokenizer, offset: int, msg: string, args: ..any) {
 	t.error_count += 1
 	t.error_count += 1
 }
 }
 
 
-advance_rune :: proc(using t: ^Tokenizer) {
-	if read_offset < len(src) {
-		offset = read_offset
-		if ch == '\n' {
-			line_offset = offset
-			line_count += 1
+advance_rune :: proc(t: ^Tokenizer) {
+	if t.read_offset < len(t.src) {
+		t.offset = t.read_offset
+		if t.ch == '\n' {
+			t.line_offset = t.offset
+			t.line_count += 1
 		}
 		}
-		r, w := rune(src[read_offset]), 1
+		r, w := rune(t.src[t.read_offset]), 1
 		switch {
 		switch {
 		case r == 0:
 		case r == 0:
 			error(t, t.offset, "illegal character NUL")
 			error(t, t.offset, "illegal character NUL")
 		case r >= utf8.RUNE_SELF:
 		case r >= utf8.RUNE_SELF:
-			r, w = utf8.decode_rune_in_string(src[read_offset:])
+			r, w = utf8.decode_rune_in_string(t.src[t.read_offset:])
 			if r == utf8.RUNE_ERROR && w == 1 {
 			if r == utf8.RUNE_ERROR && w == 1 {
 				error(t, t.offset, "illegal UTF-8 encoding")
 				error(t, t.offset, "illegal UTF-8 encoding")
-			} else if r == utf8.RUNE_BOM && offset > 0 {
+			} else if r == utf8.RUNE_BOM && t.offset > 0 {
 				error(t, t.offset, "illegal byte order mark")
 				error(t, t.offset, "illegal byte order mark")
 			}
 			}
 		}
 		}
-		read_offset += w
-		ch = r
+		t.read_offset += w
+		t.ch = r
 	} else {
 	} else {
-		offset = len(src)
-		if ch == '\n' {
-			line_offset = offset
-			line_count += 1
+		t.offset = len(t.src)
+		if t.ch == '\n' {
+			t.line_offset = t.offset
+			t.line_count += 1
 		}
 		}
-		ch = -1
+		t.ch = -1
 	}
 	}
 }
 }
 
 

+ 118 - 98
core/runtime/dynamic_map_internal.odin

@@ -414,68 +414,21 @@ map_insert_hash_dynamic :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^
 	tk := map_cell_index_dynamic(sk, info.ks, 1)
 	tk := map_cell_index_dynamic(sk, info.ks, 1)
 	tv := map_cell_index_dynamic(sv, info.vs, 1)
 	tv := map_cell_index_dynamic(sv, info.vs, 1)
 
 
-	for {
-		hp := &hs[pos]
-		element_hash := hp^
+	swap_loop: for {
+		element_hash := hs[pos]
 
 
 		if map_hash_is_empty(element_hash) {
 		if map_hash_is_empty(element_hash) {
-			kp := map_cell_index_dynamic(ks, info.ks, pos)
-			vp := map_cell_index_dynamic(vs, info.vs, pos)
-			intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(k), size_of_k)
-			intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(v), size_of_v)
-			hp^ = h
+			k_dst := map_cell_index_dynamic(ks, info.ks, pos)
+			v_dst := map_cell_index_dynamic(vs, info.vs, pos)
+			intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k)
+			intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v)
+			hs[pos] = h
 
 
-			return result if result != 0 else vp
+			return result if result != 0 else v_dst
 		}
 		}
 
 
 		if map_hash_is_deleted(element_hash) {
 		if map_hash_is_deleted(element_hash) {
-			next_pos := (pos + 1) & mask
-
-			// backward shift
-			for !map_hash_is_empty(hs[next_pos]) {
-				probe_distance := map_probe_distance(m^, hs[next_pos], next_pos)
-				if probe_distance == 0 {
-					break
-				}
-				probe_distance -= 1
-
-				kp := map_cell_index_dynamic(ks, info.ks, pos)
-				vp := map_cell_index_dynamic(vs, info.vs, pos)
-				kn := map_cell_index_dynamic(ks, info.ks, next_pos)
-				vn := map_cell_index_dynamic(vs, info.vs, next_pos)
-
-				if distance > probe_distance {
-					if result == 0 {
-						result = vp
-					}
-					// move stored into pos; store next
-					intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(k), size_of_k)
-					intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(v), size_of_v)
-					hs[pos] = h
-
-					intrinsics.mem_copy_non_overlapping(rawptr(k), rawptr(kn), size_of_k)
-					intrinsics.mem_copy_non_overlapping(rawptr(v), rawptr(vn), size_of_v)
-					h = hs[next_pos]
-				} else {
-					// move next back 1
-					intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(kn), size_of_k)
-					intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(vn), size_of_v)
-					hs[pos] = hs[next_pos]
-					distance = probe_distance
-				}
-				hs[next_pos] = 0
-				pos = (pos + 1) & mask
-				next_pos = (next_pos + 1) & mask
-				distance += 1
-			}
-
-			kp := map_cell_index_dynamic(ks, info.ks, pos)
-			vp := map_cell_index_dynamic(vs, info.vs, pos)
-			intrinsics.mem_copy_non_overlapping(rawptr(kp), rawptr(k), size_of_k)
-			intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(v), size_of_v)
-			hs[pos] = h
-
-			return result if result != 0 else vp
+			break swap_loop
 		}
 		}
 
 
 		if probe_distance := map_probe_distance(m^, element_hash, pos); distance > probe_distance {
 		if probe_distance := map_probe_distance(m^, element_hash, pos); distance > probe_distance {
@@ -495,8 +448,8 @@ map_insert_hash_dynamic :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^
 			intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(tv), size_of_v)
 			intrinsics.mem_copy_non_overlapping(rawptr(vp), rawptr(tv), size_of_v)
 
 
 			th := h
 			th := h
-			h = hp^
-			hp^ = th
+			h = hs[pos]
+			hs[pos] = th
 
 
 			distance = probe_distance
 			distance = probe_distance
 		}
 		}
@@ -504,6 +457,103 @@ map_insert_hash_dynamic :: proc "odin" (#no_alias m: ^Raw_Map, #no_alias info: ^
 		pos = (pos + 1) & mask
 		pos = (pos + 1) & mask
 		distance += 1
 		distance += 1
 	}
 	}
+
+	// backward shift loop
+	hs[pos] = 0
+	look_ahead: uintptr = 1
+	for {
+		la_pos := (pos + look_ahead) & mask
+		element_hash := hs[la_pos]
+
+		if map_hash_is_deleted(element_hash) {
+			look_ahead += 1
+			hs[la_pos] = 0
+			continue
+		}
+
+		k_dst := map_cell_index_dynamic(ks, info.ks, pos)
+		v_dst := map_cell_index_dynamic(vs, info.vs, pos)
+
+		if map_hash_is_empty(element_hash) {
+			intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k)
+			intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v)
+			hs[pos] = h
+
+			return result if result != 0 else v_dst
+		}
+
+		k_src := map_cell_index_dynamic(ks, info.ks, la_pos)
+		v_src := map_cell_index_dynamic(vs, info.vs, la_pos)
+		probe_distance := map_probe_distance(m^, element_hash, la_pos)
+
+		if probe_distance < look_ahead {
+			// probed can be made ideal while placing saved (ending condition)
+			if result == 0 {
+				result = v_dst
+			}
+			intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k)
+			intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v)
+			hs[pos] = h
+
+			// This will be an ideal move
+			pos = (la_pos - probe_distance) & mask
+			look_ahead -= probe_distance
+
+			// shift until we hit ideal/empty
+			for probe_distance != 0 {
+				k_dst = map_cell_index_dynamic(ks, info.ks, pos)
+				v_dst = map_cell_index_dynamic(vs, info.vs, pos)
+
+				intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k_src), size_of_k)
+				intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v_src), size_of_v)
+				hs[pos] = element_hash
+				hs[la_pos] = 0
+
+				pos = (pos + 1) & mask
+				la_pos = (la_pos + 1) & mask
+				look_ahead = (la_pos - pos) & mask
+				element_hash = hs[la_pos]
+				if map_hash_is_empty(element_hash) {
+					return
+				}
+
+				probe_distance = map_probe_distance(m^, element_hash, la_pos)
+				if probe_distance == 0 {
+					return
+				}
+				// can be ideal?
+				if probe_distance < look_ahead {
+					pos = (la_pos - probe_distance) & mask
+				}
+				k_src = map_cell_index_dynamic(ks, info.ks, la_pos)
+				v_src = map_cell_index_dynamic(vs, info.vs, la_pos)
+			}
+			return
+		} else if distance < probe_distance - look_ahead {
+			// shift back probed
+			intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k_src), size_of_k)
+			intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v_src), size_of_v)
+			hs[pos] = element_hash
+			hs[la_pos] = 0
+		} else {
+			// place saved, save probed
+			if result == 0 {
+				result = v_dst
+			}
+			intrinsics.mem_copy_non_overlapping(rawptr(k_dst), rawptr(k), size_of_k)
+			intrinsics.mem_copy_non_overlapping(rawptr(v_dst), rawptr(v), size_of_v)
+			hs[pos] = h
+
+			intrinsics.mem_copy_non_overlapping(rawptr(k), rawptr(k_src), size_of_k)
+			intrinsics.mem_copy_non_overlapping(rawptr(v), rawptr(v_src), size_of_v)
+			h = hs[la_pos]
+			hs[la_pos] = 0
+			distance = probe_distance - look_ahead
+		}
+
+		pos = (pos + 1) & mask
+		distance += 1
+	}
 }
 }
 
 
 @(require_results)
 @(require_results)
@@ -696,49 +746,19 @@ map_erase_dynamic :: #force_inline proc "contextless" (#no_alias m: ^Raw_Map, #n
 	m.len -= 1
 	m.len -= 1
 	ok = true
 	ok = true
 
 
-	{ // coalesce tombstones
-		// HACK NOTE(bill): This is an ugly bodge but it is coalescing the tombstone slots
-		mask := (uintptr(1)<<map_log2_cap(m^)) - 1
-		curr_index := uintptr(index)
-
-		// TODO(bill): determine a good value for this empirically
-		// if we do not implement backward shift deletion
-		PROBE_COUNT :: 8
-		for _ in 0..<PROBE_COUNT {
-			next_index := (curr_index + 1) & mask
-			if next_index == index {
-				// looped around
-				break
-			}
-
-			// if the next element is empty or has zero probe distance, then any lookup
-			// will always fail on the next, so we can clear both of them
-			hash := hs[next_index]
-			if map_hash_is_empty(hash) || map_probe_distance(m^, hash, next_index) == 0 {
-				hs[curr_index] = 0
-				return
-			}
-
-			// now the next element will have a probe count of at least one,
-			// so it can use the delete slot instead
-			hs[curr_index] = hs[next_index]
-
-			mem_copy_non_overlapping(
-				rawptr(map_cell_index_dynamic(ks, info.ks, curr_index)),
-				rawptr(map_cell_index_dynamic(ks, info.ks, next_index)),
-				int(info.ks.size_of_type),
-			)
-			mem_copy_non_overlapping(
-				rawptr(map_cell_index_dynamic(vs, info.vs, curr_index)),
-				rawptr(map_cell_index_dynamic(vs, info.vs, next_index)),
-				int(info.vs.size_of_type),
-			)
-
-			curr_index = next_index
-		}
+	mask := (uintptr(1)<<map_log2_cap(m^)) - 1
+	curr_index := uintptr(index)
+	next_index := (curr_index + 1) & mask
 
 
+	// if the next element is empty or has zero probe distance, then any lookup
+	// will always fail on the next, so we can clear both of them
+	hash := hs[next_index]
+	if map_hash_is_empty(hash) || map_probe_distance(m^, hash, next_index) == 0 {
+		hs[curr_index] = 0
+	} else {
 		hs[curr_index] |= TOMBSTONE_MASK
 		hs[curr_index] |= TOMBSTONE_MASK
 	}
 	}
+
 	return
 	return
 }
 }
 
 

+ 9 - 9
core/runtime/error_checks.odin

@@ -235,7 +235,7 @@ make_slice_error_loc :: #force_inline proc "contextless" (loc := #caller_locatio
 	handle_error(loc, len)
 	handle_error(loc, len)
 }
 }
 
 
-make_dynamic_array_error_loc :: #force_inline proc "contextless" (using loc := #caller_location, len, cap: int) {
+make_dynamic_array_error_loc :: #force_inline proc "contextless" (loc := #caller_location, len, cap: int) {
 	if 0 <= len && len <= cap {
 	if 0 <= len && len <= cap {
 		return
 		return
 	}
 	}
@@ -271,18 +271,18 @@ make_map_expr_error_loc :: #force_inline proc "contextless" (loc := #caller_loca
 
 
 
 
 
 
-bounds_check_error_loc :: #force_inline proc "contextless" (using loc := #caller_location, index, count: int) {
-	bounds_check_error(file_path, line, column, index, count)
+bounds_check_error_loc :: #force_inline proc "contextless" (loc := #caller_location, index, count: int) {
+	bounds_check_error(loc.file_path, loc.line, loc.column, index, count)
 }
 }
 
 
-slice_expr_error_hi_loc :: #force_inline proc "contextless" (using loc := #caller_location, hi: int, len: int) {
-	slice_expr_error_hi(file_path, line, column, hi, len)
+slice_expr_error_hi_loc :: #force_inline proc "contextless" (loc := #caller_location, hi: int, len: int) {
+	slice_expr_error_hi(loc.file_path, loc.line, loc.column, hi, len)
 }
 }
 
 
-slice_expr_error_lo_hi_loc :: #force_inline proc "contextless" (using loc := #caller_location, lo, hi: int, len: int) {
-	slice_expr_error_lo_hi(file_path, line, column, lo, hi, len)
+slice_expr_error_lo_hi_loc :: #force_inline proc "contextless" (loc := #caller_location, lo, hi: int, len: int) {
+	slice_expr_error_lo_hi(loc.file_path, loc.line, loc.column, lo, hi, len)
 }
 }
 
 
-dynamic_array_expr_error_loc :: #force_inline proc "contextless" (using loc := #caller_location, low, high, max: int) {
-	dynamic_array_expr_error(file_path, line, column, low, high, max)
+dynamic_array_expr_error_loc :: #force_inline proc "contextless" (loc := #caller_location, low, high, max: int) {
+	dynamic_array_expr_error(loc.file_path, loc.line, loc.column, low, high, max)
 }
 }

+ 6 - 6
core/runtime/print.odin

@@ -215,19 +215,19 @@ print_uint    :: proc "contextless" (x: uint)    { print_u64(u64(x)) }
 print_uintptr :: proc "contextless" (x: uintptr) { print_u64(u64(x)) }
 print_uintptr :: proc "contextless" (x: uintptr) { print_u64(u64(x)) }
 print_int     :: proc "contextless" (x: int)     { print_i64(i64(x)) }
 print_int     :: proc "contextless" (x: int)     { print_i64(i64(x)) }
 
 
-print_caller_location :: proc "contextless" (using loc: Source_Code_Location) {
-	print_string(file_path)
+print_caller_location :: proc "contextless" (loc: Source_Code_Location) {
+	print_string(loc.file_path)
 	when ODIN_ERROR_POS_STYLE == .Default {
 	when ODIN_ERROR_POS_STYLE == .Default {
 		print_byte('(')
 		print_byte('(')
-		print_u64(u64(line))
+		print_u64(u64(loc.line))
 		print_byte(':')
 		print_byte(':')
-		print_u64(u64(column))
+		print_u64(u64(loc.column))
 		print_byte(')')
 		print_byte(')')
 	} else when ODIN_ERROR_POS_STYLE == .Unix {
 	} else when ODIN_ERROR_POS_STYLE == .Unix {
 		print_byte(':')
 		print_byte(':')
-		print_u64(u64(line))
+		print_u64(u64(loc.line))
 		print_byte(':')
 		print_byte(':')
-		print_u64(u64(column))
+		print_u64(u64(loc.column))
 		print_byte(':')
 		print_byte(':')
 	} else {
 	} else {
 		#panic("unhandled ODIN_ERROR_POS_STYLE")
 		#panic("unhandled ODIN_ERROR_POS_STYLE")

+ 2 - 0
core/text/i18n/i18n.odin

@@ -71,6 +71,8 @@ Error :: enum {
 	TS_File_Expected_Source,
 	TS_File_Expected_Source,
 	TS_File_Expected_Translation,
 	TS_File_Expected_Translation,
 	TS_File_Expected_NumerusForm,
 	TS_File_Expected_NumerusForm,
+	Bad_Str,
+	Bad_Id,
 
 
 }
 }
 
 

+ 34 - 8
core/text/i18n/qt_linguist.odin

@@ -30,10 +30,26 @@ TS_XML_Options := xml.Options{
 parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTIONS, pluralizer: proc(int) -> int = nil, allocator := context.allocator) -> (translation: ^Translation, err: Error) {
 parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTIONS, pluralizer: proc(int) -> int = nil, allocator := context.allocator) -> (translation: ^Translation, err: Error) {
 	context.allocator = allocator
 	context.allocator = allocator
 
 
+	get_str :: proc(val: xml.Value) -> (str: string, err: Error) {
+		v, ok := val.(string)
+		if ok {
+			return v, .None
+		}
+		return "", .Bad_Str
+	}
+
+	get_id :: proc(val: xml.Value) -> (str: xml.Element_ID, err: Error) {
+		v, ok := val.(xml.Element_ID)
+		if ok {
+			return v, .None
+		}
+		return 0, .Bad_Id
+	}
+
 	ts, xml_err := xml.parse(data, TS_XML_Options)
 	ts, xml_err := xml.parse(data, TS_XML_Options)
 	defer xml.destroy(ts)
 	defer xml.destroy(ts)
 
 
-	if xml_err != .None || ts.element_count < 1 || ts.elements[0].ident != "TS" || len(ts.elements[0].children) == 0 {
+	if xml_err != .None || ts.element_count < 1 || ts.elements[0].ident != "TS" || len(ts.elements[0].value) == 0 {
 		return nil, .TS_File_Parse_Error
 		return nil, .TS_File_Parse_Error
 	}
 	}
 
 
@@ -46,10 +62,12 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 
 
 	section: ^Section
 	section: ^Section
 
 
-	for child_id in ts.elements[0].children {
+	for value in ts.elements[0].value {
+		child_id := get_id(value) or_return
+
 		// These should be <context>s.
 		// These should be <context>s.
-		child := ts.elements[child_id]
-		if child.ident != "context" {
+
+		if ts.elements[child_id].ident != "context" {
 			return translation, .TS_File_Expected_Context
 			return translation, .TS_File_Expected_Context
 		}
 		}
 
 
@@ -61,7 +79,8 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 
 
 		section_name, _ := strings.intern_get(&translation.intern, "")
 		section_name, _ := strings.intern_get(&translation.intern, "")
 		if !options.merge_sections {
 		if !options.merge_sections {
-			section_name, _ = strings.intern_get(&translation.intern, ts.elements[section_name_id].value)
+			value_text := get_str(ts.elements[section_name_id].value[0]) or_return
+			section_name, _ = strings.intern_get(&translation.intern, value_text)
 		}
 		}
 
 
 		if section_name not_in translation.k_v {
 		if section_name not_in translation.k_v {
@@ -92,8 +111,14 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 				return translation, .TS_File_Expected_Translation
 				return translation, .TS_File_Expected_Translation
 			}
 			}
 
 
-			source, _ := strings.intern_get(&translation.intern, ts.elements[source_id].value)
-			xlat,   _ := strings.intern_get(&translation.intern, ts.elements[translation_id].value)
+			source    := get_str(ts.elements[source_id].value[0]) or_return
+			source, _  = strings.intern_get(&translation.intern, source)
+
+			xlat := ""
+			if !has_plurals {
+				xlat    = get_str(ts.elements[translation_id].value[0]) or_return
+				xlat, _ = strings.intern_get(&translation.intern, xlat)
+			}
 
 
 			if source in section {
 			if source in section {
 				return translation, .Duplicate_Key
 				return translation, .Duplicate_Key
@@ -124,7 +149,8 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 					if !numerus_found {
 					if !numerus_found {
 						break
 						break
 					}
 					}
-					numerus, _ := strings.intern_get(&translation.intern, ts.elements[numerus_id].value)
+					numerus := get_str(ts.elements[numerus_id].value[0]) or_return
+					numerus, _ = strings.intern_get(&translation.intern, numerus)
 					section[source][num_plurals] = numerus
 					section[source][num_plurals] = numerus
 
 
 					num_plurals += 1
 					num_plurals += 1

+ 2 - 2
core/thread/thread_windows.odin

@@ -129,8 +129,8 @@ _destroy :: proc(thread: ^Thread) {
 	free(thread, thread.creation_allocator)
 	free(thread, thread.creation_allocator)
 }
 }
 
 
-_terminate :: proc(using thread : ^Thread, exit_code: int) {
-	win32.TerminateThread(win32_thread, u32(exit_code))
+_terminate :: proc(thread: ^Thread, exit_code: int) {
+	win32.TerminateThread(thread.win32_thread, u32(exit_code))
 }
 }
 
 
 _yield :: proc() {
 _yield :: proc() {

+ 19 - 17
core/time/time.odin

@@ -59,28 +59,30 @@ sleep :: proc "contextless" (d: Duration) {
 	_sleep(d)
 	_sleep(d)
 }
 }
 
 
-stopwatch_start :: proc "contextless" (using stopwatch: ^Stopwatch) {
-	if !running {
-		_start_time = tick_now()
-		running = true
+stopwatch_start :: proc "contextless" (stopwatch: ^Stopwatch) {
+	if !stopwatch.running {
+		stopwatch._start_time = tick_now()
+		stopwatch.running = true
 	}
 	}
 }
 }
 
 
-stopwatch_stop :: proc "contextless" (using stopwatch: ^Stopwatch) {
-	if running {
-		_accumulation += tick_diff(_start_time, tick_now())
-		running = false
+stopwatch_stop :: proc "contextless" (stopwatch: ^Stopwatch) {
+	if stopwatch.running {
+		stopwatch._accumulation += tick_diff(stopwatch._start_time, tick_now())
+		stopwatch.running = false
 	}
 	}
 }
 }
 
 
-stopwatch_reset :: proc "contextless" (using stopwatch: ^Stopwatch) {
-	_accumulation = {}
-	running = false
+stopwatch_reset :: proc "contextless" (stopwatch: ^Stopwatch) {
+	stopwatch._accumulation = {}
+	stopwatch.running = false
 }
 }
 
 
-stopwatch_duration :: proc "contextless" (using stopwatch: Stopwatch) -> Duration {
-	if !running { return _accumulation }
-	return _accumulation + tick_diff(_start_time, tick_now())
+stopwatch_duration :: proc "contextless" (stopwatch: Stopwatch) -> Duration {
+	if !stopwatch.running {
+		return stopwatch._accumulation
+	}
+	return stopwatch._accumulation + tick_diff(stopwatch._start_time, tick_now())
 }
 }
 
 
 diff :: proc "contextless" (start, end: Time) -> Duration {
 diff :: proc "contextless" (start, end: Time) -> Duration {
@@ -171,9 +173,9 @@ day :: proc "contextless" (t: Time) -> (day: int) {
 }
 }
 
 
 weekday :: proc "contextless" (t: Time) -> (weekday: Weekday) {
 weekday :: proc "contextless" (t: Time) -> (weekday: Weekday) {
-    abs := _time_abs(t)
-    sec := (abs + u64(Weekday.Monday) * SECONDS_PER_DAY) % SECONDS_PER_WEEK
-    return Weekday(int(sec) / SECONDS_PER_DAY)
+	abs := _time_abs(t)
+	sec := (abs + u64(Weekday.Monday) * SECONDS_PER_DAY) % SECONDS_PER_WEEK
+	return Weekday(int(sec) / SECONDS_PER_DAY)
 }
 }
 
 
 clock :: proc { clock_from_time, clock_from_duration, clock_from_stopwatch }
 clock :: proc { clock_from_time, clock_from_duration, clock_from_stopwatch }
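
For context on the call sites touched by the stopwatch refactor above, here is a minimal usage sketch of the core:time stopwatch API; the package name and the busy-work loop are illustrative only.

package stopwatch_example

import "core:fmt"
import "core:time"

main :: proc() {
	sw: time.Stopwatch
	time.stopwatch_start(&sw)

	// Placeholder workload; anything to be measured goes here.
	acc := 0
	for i in 0..<1_000_000 {
		acc += i
	}

	time.stopwatch_stop(&sw)
	fmt.println("elapsed:", time.stopwatch_duration(sw), "sum:", acc)
}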

+ 1 - 0
examples/demo/demo.odin

@@ -1,3 +1,4 @@
+//+vet !using-stmt !using-param
 package main
 package main
 
 
 import "core:fmt"
 import "core:fmt"

+ 33 - 2
src/build_settings.cpp

@@ -216,6 +216,37 @@ enum BuildPath : u8 {
 	BuildPathCOUNT,
 	BuildPathCOUNT,
 };
 };
 
 
+enum VetFlags : u64 {
+	VetFlag_NONE       = 0,
+	VetFlag_Unused     = 1u<<0, // 1
+	VetFlag_Shadowing  = 1u<<1, // 2
+	VetFlag_UsingStmt  = 1u<<2, // 4
+	VetFlag_UsingParam = 1u<<3, // 8
+
+	VetFlag_Extra     = 1u<<16,
+
+	VetFlag_All = VetFlag_Unused|VetFlag_Shadowing|VetFlag_UsingStmt, // excluding extra
+
+	VetFlag_Using = VetFlag_UsingStmt|VetFlag_UsingParam,
+};
+
+u64 get_vet_flag_from_name(String const &name) {
+	if (name == "unused") {
+		return VetFlag_Unused;
+	} else if (name == "shadowing") {
+		return VetFlag_Shadowing;
+	} else if (name == "using-stmt") {
+		return VetFlag_UsingStmt;
+	} else if (name == "using-param") {
+		return VetFlag_UsingParam;
+	} else if (name == "extra") {
+		return VetFlag_Extra;
+	}
+	return VetFlag_NONE;
+}
+
+
+
 // This stores the information for the specify architecture of this build
 // This stores the information for the specify architecture of this build
 struct BuildContext {
 struct BuildContext {
 	// Constants
 	// Constants
@@ -255,6 +286,8 @@ struct BuildContext {
 	String resource_filepath;
 	String resource_filepath;
 	String pdb_filepath;
 	String pdb_filepath;
 
 
+	u64 vet_flags;
+
 	bool   has_resource;
 	bool   has_resource;
 	String link_flags;
 	String link_flags;
 	String extra_linker_flags;
 	String extra_linker_flags;
@@ -280,8 +313,6 @@ struct BuildContext {
 	bool   no_entry_point;
 	bool   no_entry_point;
 	bool   no_thread_local;
 	bool   no_thread_local;
 	bool   use_lld;
 	bool   use_lld;
-	bool   vet;
-	bool   vet_extra;
 	bool   cross_compiling;
 	bool   cross_compiling;
 	bool   different_os;
 	bool   different_os;
 	bool   keep_object_files;
 	bool   keep_object_files;

+ 2 - 2
src/check_decl.cpp

@@ -1064,7 +1064,7 @@ gb_internal void check_proc_decl(CheckerContext *ctx, Entity *e, DeclInfo *d) {
 		auto *fp = &ctx->info->foreigns;
 		auto *fp = &ctx->info->foreigns;
 		StringHashKey key = string_hash_string(name);
 		StringHashKey key = string_hash_string(name);
 		Entity **found = string_map_get(fp, key);
 		Entity **found = string_map_get(fp, key);
-		if (found) {
+		if (found && e != *found) {
 			Entity *f = *found;
 			Entity *f = *found;
 			TokenPos pos = f->token.pos;
 			TokenPos pos = f->token.pos;
 			Type *this_type = base_type(e->type);
 			Type *this_type = base_type(e->type);
@@ -1636,7 +1636,7 @@ gb_internal bool check_proc_body(CheckerContext *ctx_, Token token, DeclInfo *de
 	}
 	}
 	check_close_scope(ctx);
 	check_close_scope(ctx);
 
 
-	check_scope_usage(ctx->checker, ctx->scope);
+	check_scope_usage(ctx->checker, ctx->scope, check_vet_flags(body));
 
 
 	add_deps_from_child_to_parent(decl);
 	add_deps_from_child_to_parent(decl);
 
 

+ 3 - 3
src/check_expr.cpp

@@ -3099,7 +3099,7 @@ gb_internal void check_cast(CheckerContext *c, Operand *x, Type *type) {
 		update_untyped_expr_type(c, x->expr, final_type, true);
 		update_untyped_expr_type(c, x->expr, final_type, true);
 	}
 	}
 
 
-	if (build_context.vet_extra) {
+	if (check_vet_flags(x->expr) & VetFlag_Extra) {
 		if (are_types_identical(x->type, type)) {
 		if (are_types_identical(x->type, type)) {
 			gbString str = type_to_string(type);
 			gbString str = type_to_string(type);
 			warning(x->expr, "Unneeded cast to the same type '%s'", str);
 			warning(x->expr, "Unneeded cast to the same type '%s'", str);
@@ -3171,7 +3171,7 @@ gb_internal bool check_transmute(CheckerContext *c, Ast *node, Operand *o, Type
 		return false;
 		return false;
 	}
 	}
 
 
-	if (build_context.vet_extra) {
+	if (check_vet_flags(node) & VetFlag_Extra) {
 		if (are_types_identical(o->type, dst_t)) {
 		if (are_types_identical(o->type, dst_t)) {
 			gbString str = type_to_string(dst_t);
 			gbString str = type_to_string(dst_t);
 			warning(o->expr, "Unneeded transmute to the same type '%s'", str);
 			warning(o->expr, "Unneeded transmute to the same type '%s'", str);
@@ -10028,7 +10028,7 @@ gb_internal ExprKind check_expr_base_internal(CheckerContext *c, Operand *o, Ast
 			Type *type = type_of_expr(ac->expr);
 			Type *type = type_of_expr(ac->expr);
 			check_cast(c, o, type_hint);
 			check_cast(c, o, type_hint);
 			if (is_type_typed(type) && are_types_identical(type, type_hint)) {
 			if (is_type_typed(type) && are_types_identical(type, type_hint)) {
-				if (build_context.vet_extra) {
+				if (check_vet_flags(node) & VetFlag_Extra) {
 					error(node, "Redundant 'auto_cast' applied to expression");
 					error(node, "Redundant 'auto_cast' applied to expression");
 				}
 				}
 			}
 			}
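
The three hunks above route what used to be build_context.vet_extra through the per-file vet flags. As a reminder of what the extra checks report, a small illustrative Odin snippet (variable names are arbitrary); it builds normally unless -vet-extra is applied.

package vet_extra_example

import "core:fmt"

main :: proc() {
	x := 42               // x is already an int
	y := int(x)           // flagged: "Unneeded cast to the same type 'int'"
	z: int = auto_cast x  // flagged: "Redundant 'auto_cast' applied to expression"
	fmt.println(y, z)
}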

+ 6 - 0
src/check_stmt.cpp

@@ -2464,6 +2464,12 @@ gb_internal void check_stmt_internal(CheckerContext *ctx, Ast *node, u32 flags)
 			error(us->token, "Empty 'using' list");
 			error(us->token, "Empty 'using' list");
 			return;
 			return;
 		}
 		}
+		if (check_vet_flags(node) & VetFlag_UsingStmt) {
+			ERROR_BLOCK();
+			error(node, "'using' as a statement is not allowed when '-vet' or '-vet-using-stmt' is applied");
+			error_line("\t'using' is considered bad practice to use as a statement outside of immediate refactoring\n");
+		}
+
 		for (Ast *expr : us->list) {
 		for (Ast *expr : us->list) {
 			expr = unparen_expr(expr);
 			expr = unparen_expr(expr);
 			Entity *e = nullptr;
 			Entity *e = nullptr;

+ 6 - 0
src/check_type.cpp

@@ -1474,6 +1474,12 @@ gb_internal Type *check_get_params(CheckerContext *ctx, Scope *scope, Ast *_para
 		Type *specialization = nullptr;
 		Type *specialization = nullptr;
 
 
 		bool is_using = (p->flags&FieldFlag_using) != 0;
 		bool is_using = (p->flags&FieldFlag_using) != 0;
+		if ((check_vet_flags(param) & VetFlag_UsingParam) && is_using) {
+			ERROR_BLOCK();
+			error(param, "'using' on a procedure parameter is not allowed when '-vet' or '-vet-using-param' is applied");
+			error_line("\t'using' is considered bad practice to use as a statement/procedure parameter outside of immediate refactoring\n");
+
+		}
 
 
 		if (type_expr == nullptr) {
 		if (type_expr == nullptr) {
 			param_value = handle_parameter_value(ctx, nullptr, &type, default_value, true);
 			param_value = handle_parameter_value(ctx, nullptr, &type, default_value, true);
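
For reference, a small hedged Odin sketch of the patterns the two new checks above are meant to flag; the identifiers are illustrative, and the file builds as-is when neither -vet nor the granular -vet-using-stmt / -vet-using-param flags are applied.

package vet_using_example

import "core:fmt"

Vec2 :: struct { x, y: f32 }

// Flagged by -vet-using-param: 'using' applied to a procedure parameter.
length_squared :: proc(using v: Vec2) -> f32 {
	return x*x + y*y
}

main :: proc() {
	v := Vec2{3, 4}
	// Flagged by -vet-using-stmt: 'using' as a statement.
	using v
	fmt.println(x*x + y*y, length_squared(v))
}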

+ 43 - 9
src/checker.cpp

@@ -521,6 +521,28 @@ GB_COMPARE_PROC(entity_variable_pos_cmp) {
 }
 }
 
 
 
 
+
+gb_internal u64 check_vet_flags(CheckerContext *c) {
+	AstFile *file = c->file;
+	if (file == nullptr &&
+	    c->curr_proc_decl &&
+	    c->curr_proc_decl->proc_lit) {
+		file = c->curr_proc_decl->proc_lit->file();
+	}
+	if (file && file->vet_flags_set) {
+		return file->vet_flags;
+	}
+	return build_context.vet_flags;
+}
+
+gb_internal u64 check_vet_flags(Ast *node) {
+	AstFile *file = node->file();
+	if (file && file->vet_flags_set) {
+		return file->vet_flags;
+	}
+	return build_context.vet_flags;
+}
+
 enum VettedEntityKind {
 enum VettedEntityKind {
 	VettedEntity_Invalid,
 	VettedEntity_Invalid,
 
 
@@ -655,9 +677,9 @@ gb_internal bool check_vet_unused(Checker *c, Entity *e, VettedEntity *ve) {
 	return false;
 	return false;
 }
 }
 
 
-gb_internal void check_scope_usage(Checker *c, Scope *scope) {
-	bool vet_unused = true;
-	bool vet_shadowing = true;
+gb_internal void check_scope_usage(Checker *c, Scope *scope, u64 vet_flags) {
+	bool vet_unused = (vet_flags & VetFlag_Unused) != 0;
+	bool vet_shadowing = (vet_flags & (VetFlag_Shadowing|VetFlag_Using)) != 0;
 
 
 	Array<VettedEntity> vetted_entities = {};
 	Array<VettedEntity> vetted_entities = {};
 	array_init(&vetted_entities, heap_allocator());
 	array_init(&vetted_entities, heap_allocator());
@@ -691,15 +713,17 @@ gb_internal void check_scope_usage(Checker *c, Scope *scope) {
 
 
 		if (ve.kind == VettedEntity_Shadowed_And_Unused) {
 		if (ve.kind == VettedEntity_Shadowed_And_Unused) {
 			error(e->token, "'%.*s' declared but not used, possibly shadows declaration at line %d", LIT(name), other->token.pos.line);
 			error(e->token, "'%.*s' declared but not used, possibly shadows declaration at line %d", LIT(name), other->token.pos.line);
-		} else if (build_context.vet) {
+		} else if (vet_flags) {
 			switch (ve.kind) {
 			switch (ve.kind) {
 			case VettedEntity_Unused:
 			case VettedEntity_Unused:
-				error(e->token, "'%.*s' declared but not used", LIT(name));
+				if (vet_flags & VetFlag_Unused) {
+					error(e->token, "'%.*s' declared but not used", LIT(name));
+				}
 				break;
 				break;
 			case VettedEntity_Shadowed:
 			case VettedEntity_Shadowed:
-				if (e->flags&EntityFlag_Using) {
+				if ((vet_flags & (VetFlag_Shadowing|VetFlag_Using)) != 0 && e->flags&EntityFlag_Using) {
 					error(e->token, "Declaration of '%.*s' from 'using' shadows declaration at line %d", LIT(name), other->token.pos.line);
 					error(e->token, "Declaration of '%.*s' from 'using' shadows declaration at line %d", LIT(name), other->token.pos.line);
-				} else {
+				} else if ((vet_flags & (VetFlag_Shadowing)) != 0) {
 					error(e->token, "Declaration of '%.*s' shadows declaration at line %d", LIT(name), other->token.pos.line);
 					error(e->token, "Declaration of '%.*s' shadows declaration at line %d", LIT(name), other->token.pos.line);
 				}
 				}
 				break;
 				break;
@@ -726,7 +750,7 @@ gb_internal void check_scope_usage(Checker *c, Scope *scope) {
 		if (child->flags & (ScopeFlag_Proc|ScopeFlag_Type|ScopeFlag_File)) {
 		if (child->flags & (ScopeFlag_Proc|ScopeFlag_Type|ScopeFlag_File)) {
 			// Ignore these
 			// Ignore these
 		} else {
 		} else {
-			check_scope_usage(c, child);
+			check_scope_usage(c, child, vet_flags);
 		}
 		}
 	}
 	}
 }
 }
@@ -2978,6 +3002,12 @@ gb_internal DECL_ATTRIBUTE_PROC(proc_group_attribute) {
 			}
 			}
 		}
 		}
 		return true;
 		return true;
+	} else if (name == "require_results") {
+		if (value != nullptr) {
+			error(elem, "Expected no value for '%.*s'", LIT(name));
+		}
+		ac->require_results = true;
+		return true;
 	}
 	}
 	return false;
 	return false;
 }
 }
@@ -5952,7 +5982,11 @@ gb_internal void check_parsed_files(Checker *c) {
 	TIME_SECTION("check scope usage");
 	TIME_SECTION("check scope usage");
 	for (auto const &entry : c->info.files) {
 	for (auto const &entry : c->info.files) {
 		AstFile *f = entry.value;
 		AstFile *f = entry.value;
-		check_scope_usage(c, f->scope);
+		u64 vet_flags = build_context.vet_flags;
+		if (f->vet_flags_set) {
+			vet_flags = f->vet_flags;
+		}
+		check_scope_usage(c, f->scope, vet_flags);
 	}
 	}
 
 
 	TIME_SECTION("add basic type information");
 	TIME_SECTION("add basic type information");
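
One of the checker.cpp additions above teaches proc_group_attribute about require_results, so the attribute can be attached to a procedure group rather than to each variant. A minimal, hypothetical sketch of what that permits (names are illustrative):

package require_results_group_example

import "core:fmt"

add_int :: proc(a, b: int) -> int { return a + b }
add_f32 :: proc(a, b: f32) -> f32 { return a + b }

// With this change, the attribute may sit on the group itself,
// requiring every call's result to be used or explicitly discarded.
@(require_results)
add :: proc { add_int, add_f32 }

main :: proc() {
	sum := add(1, 2)       // fine: result is used
	fmt.println(sum)
	_ = add(f32(1), 2.5)   // fine: result explicitly discarded
	// add(3, 4)           // would be rejected under @(require_results)
}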

+ 3 - 0
src/checker.hpp

@@ -449,6 +449,9 @@ struct CheckerContext {
 	Ast *assignment_lhs_hint;
 	Ast *assignment_lhs_hint;
 };
 };
 
 
+gb_internal u64 check_vet_flags(CheckerContext *c);
+gb_internal u64 check_vet_flags(Ast *node);
+
 
 
 struct Checker {
 struct Checker {
 	Parser *    parser;
 	Parser *    parser;

+ 50 - 7
src/main.cpp

@@ -654,6 +654,10 @@ enum BuildFlagKind {
 	BuildFlag_NoThreadedChecker,
 	BuildFlag_NoThreadedChecker,
 	BuildFlag_ShowDebugMessages,
 	BuildFlag_ShowDebugMessages,
 	BuildFlag_Vet,
 	BuildFlag_Vet,
+	BuildFlag_VetShadowing,
+	BuildFlag_VetUnused,
+	BuildFlag_VetUsingStmt,
+	BuildFlag_VetUsingParam,
 	BuildFlag_VetExtra,
 	BuildFlag_VetExtra,
 	BuildFlag_IgnoreUnknownAttributes,
 	BuildFlag_IgnoreUnknownAttributes,
 	BuildFlag_ExtraLinkerFlags,
 	BuildFlag_ExtraLinkerFlags,
@@ -830,8 +834,14 @@ gb_internal bool parse_build_flags(Array<String> args) {
 	add_flag(&build_flags, BuildFlag_UseSeparateModules,      str_lit("use-separate-modules"),      BuildFlagParam_None,    Command__does_build);
 	add_flag(&build_flags, BuildFlag_UseSeparateModules,      str_lit("use-separate-modules"),      BuildFlagParam_None,    Command__does_build);
 	add_flag(&build_flags, BuildFlag_NoThreadedChecker,       str_lit("no-threaded-checker"),       BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_NoThreadedChecker,       str_lit("no-threaded-checker"),       BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_ShowDebugMessages,       str_lit("show-debug-messages"),       BuildFlagParam_None,    Command_all);
 	add_flag(&build_flags, BuildFlag_ShowDebugMessages,       str_lit("show-debug-messages"),       BuildFlagParam_None,    Command_all);
+
 	add_flag(&build_flags, BuildFlag_Vet,                     str_lit("vet"),                       BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_Vet,                     str_lit("vet"),                       BuildFlagParam_None,    Command__does_check);
+	add_flag(&build_flags, BuildFlag_VetUnused,               str_lit("vet-unused"),                BuildFlagParam_None,    Command__does_check);
+	add_flag(&build_flags, BuildFlag_VetShadowing,            str_lit("vet-shadowing"),             BuildFlagParam_None,    Command__does_check);
+	add_flag(&build_flags, BuildFlag_VetUsingStmt,            str_lit("vet-using-stmt"),            BuildFlagParam_None,    Command__does_check);
+	add_flag(&build_flags, BuildFlag_VetUsingParam,           str_lit("vet-using-param"),           BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_VetExtra,                str_lit("vet-extra"),                 BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_VetExtra,                str_lit("vet-extra"),                 BuildFlagParam_None,    Command__does_check);
+
 	add_flag(&build_flags, BuildFlag_IgnoreUnknownAttributes, str_lit("ignore-unknown-attributes"), BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_IgnoreUnknownAttributes, str_lit("ignore-unknown-attributes"), BuildFlagParam_None,    Command__does_check);
 	add_flag(&build_flags, BuildFlag_ExtraLinkerFlags,        str_lit("extra-linker-flags"),        BuildFlagParam_String,  Command__does_build);
 	add_flag(&build_flags, BuildFlag_ExtraLinkerFlags,        str_lit("extra-linker-flags"),        BuildFlagParam_String,  Command__does_build);
 	add_flag(&build_flags, BuildFlag_ExtraAssemblerFlags,     str_lit("extra-assembler-flags"),     BuildFlagParam_String,  Command__does_build);
 	add_flag(&build_flags, BuildFlag_ExtraAssemblerFlags,     str_lit("extra-assembler-flags"),     BuildFlagParam_String,  Command__does_build);
@@ -1362,13 +1372,23 @@ gb_internal bool parse_build_flags(Array<String> args) {
 							build_context.show_debug_messages = true;
 							build_context.show_debug_messages = true;
 							break;
 							break;
 						case BuildFlag_Vet:
 						case BuildFlag_Vet:
-							build_context.vet = true;
+							if (build_context.vet_flags & VetFlag_Extra) {
+								build_context.vet_flags |= VetFlag_All;
+							} else {
+								build_context.vet_flags &= ~VetFlag_Extra;
+								build_context.vet_flags |= VetFlag_All;
+							}
 							break;
 							break;
-						case BuildFlag_VetExtra: {
-							build_context.vet = true;
-							build_context.vet_extra = true;
+
+						case BuildFlag_VetUnused:     build_context.vet_flags |= VetFlag_Unused;    break;
+						case BuildFlag_VetShadowing:  build_context.vet_flags |= VetFlag_Shadowing; break;
+						case BuildFlag_VetUsingStmt:  build_context.vet_flags |= VetFlag_UsingStmt; break;
+						case BuildFlag_VetUsingParam: build_context.vet_flags |= VetFlag_UsingParam; break;
+
+						case BuildFlag_VetExtra:
+							build_context.vet_flags = VetFlag_All | VetFlag_Extra;
 							break;
 							break;
-						}
+
 						case BuildFlag_IgnoreUnknownAttributes:
 						case BuildFlag_IgnoreUnknownAttributes:
 							build_context.ignore_unknown_attributes = true;
 							build_context.ignore_unknown_attributes = true;
 							break;
 							break;
@@ -2124,19 +2144,42 @@ gb_internal void print_show_help(String const arg0, String const &command) {
 		print_usage_line(2, "Multithread the semantic checker stage");
 		print_usage_line(2, "Multithread the semantic checker stage");
 		print_usage_line(0, "");
 		print_usage_line(0, "");
 		#endif
 		#endif
+	}
 
 
+	if (check) {
 		print_usage_line(1, "-vet");
 		print_usage_line(1, "-vet");
 		print_usage_line(2, "Do extra checks on the code");
 		print_usage_line(2, "Do extra checks on the code");
 		print_usage_line(2, "Extra checks include:");
 		print_usage_line(2, "Extra checks include:");
-		print_usage_line(3, "Variable shadowing within procedures");
-		print_usage_line(3, "Unused declarations");
+		print_usage_line(2, "-vet-unused");
+		print_usage_line(2, "-vet-shadowing");
+		print_usage_line(2, "-vet-using-stmt");
+		print_usage_line(0, "");
+
+		print_usage_line(1, "-vet-unused");
+		print_usage_line(2, "Checks for unused declarations");
+		print_usage_line(0, "");
+
+		print_usage_line(1, "-vet-shadowing");
+		print_usage_line(2, "Checks for variable shadowing within procedures");
+		print_usage_line(0, "");
+
+		print_usage_line(1, "-vet-using-stmt");
+		print_usage_line(2, "Checks for the use of 'using' as a statement");
+		print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring");
+		print_usage_line(0, "");
+
+		print_usage_line(1, "-vet-using-param");
+		print_usage_line(2, "Checks for the use of 'using' on procedure parameters");
+		print_usage_line(2, "'using' is considered bad practice outside of immediate refactoring");
 		print_usage_line(0, "");
 		print_usage_line(0, "");
 
 
 		print_usage_line(1, "-vet-extra");
 		print_usage_line(1, "-vet-extra");
 		print_usage_line(2, "Do even more checks than standard vet on the code");
 		print_usage_line(2, "Do even more checks than standard vet on the code");
 		print_usage_line(2, "To treat the extra warnings as errors, use -warnings-as-errors");
 		print_usage_line(2, "To treat the extra warnings as errors, use -warnings-as-errors");
 		print_usage_line(0, "");
 		print_usage_line(0, "");
+	}
 
 
+	if (check) {
 		print_usage_line(1, "-ignore-unknown-attributes");
 		print_usage_line(1, "-ignore-unknown-attributes");
 		print_usage_line(2, "Ignores unknown attributes");
 		print_usage_line(2, "Ignores unknown attributes");
 		print_usage_line(2, "This can be used with metaprogramming tools");
 		print_usage_line(2, "This can be used with metaprogramming tools");

+ 85 - 0
src/parser.cpp

@@ -5528,6 +5528,88 @@ gb_internal bool parse_build_tag(Token token_for_pos, String s) {
 	return any_correct;
 	return any_correct;
 }
 }
 
 
+gb_internal String vet_tag_get_token(String s, String *out) {
+	s = string_trim_whitespace(s);
+	isize n = 0;
+	while (n < s.len) {
+		Rune rune = 0;
+		isize width = utf8_decode(&s[n], s.len-n, &rune);
+		if (n == 0 && rune == '!') {
+
+		} else if (!rune_is_letter(rune) && !rune_is_digit(rune) && rune != '-') {
+			isize k = gb_max(gb_max(n, width), 1);
+			*out = substring(s, k, s.len);
+			return substring(s, 0, k);
+		}
+		n += width;
+	}
+	out->len = 0;
+	return s;
+}
+
+
+gb_internal u64 parse_vet_tag(Token token_for_pos, String s) {
+	String const prefix = str_lit("+vet");
+	GB_ASSERT(string_starts_with(s, prefix));
+	s = string_trim_whitespace(substring(s, prefix.len, s.len));
+
+	if (s.len == 0) {
+		return VetFlag_All;
+	}
+
+
+	u64 vet_flags = 0;
+	u64 vet_not_flags = 0;
+
+	while (s.len > 0) {
+		String p = string_trim_whitespace(vet_tag_get_token(s, &s));
+		if (p.len == 0) {
+			break;
+		}
+
+		bool is_notted = false;
+		if (p[0] == '!') {
+			is_notted = true;
+			p = substring(p, 1, p.len);
+			if (p.len == 0) {
+				syntax_error(token_for_pos, "Expected a vet flag name after '!'");
+				return build_context.vet_flags;
+			}
+		}
+
+		u64 flag = get_vet_flag_from_name(p);
+		if (flag != VetFlag_NONE) {
+			if (is_notted) {
+				vet_not_flags |= flag;
+			} else {
+				vet_flags     |= flag;
+			}
+		} else {
+			ERROR_BLOCK();
+			syntax_error(token_for_pos, "Invalid vet flag name: %.*s", LIT(p));
+			error_line("\tExpected one of the following\n");
+			error_line("\tunused\n");
+			error_line("\tshadowing\n");
+			error_line("\tusing-stmt\n");
+			error_line("\tusing-param\n");
+			error_line("\textra\n");
+			return build_context.vet_flags;
+		}
+	}
+
+	if (vet_flags == 0 && vet_not_flags == 0) {
+		return build_context.vet_flags;
+	}
+	if (vet_flags == 0 && vet_not_flags != 0) {
+		return build_context.vet_flags &~ vet_not_flags;
+	}
+	if (vet_flags != 0 && vet_not_flags == 0) {
+		return vet_flags;
+	}
+	GB_ASSERT(vet_flags != 0 && vet_not_flags != 0);
+	return vet_flags &~ vet_not_flags;
+}
+
 gb_internal String dir_from_path(String path) {
 gb_internal String dir_from_path(String path) {
 	String base_dir = path;
 	String base_dir = path;
 	for (isize i = path.len-1; i >= 0; i--) {
 	for (isize i = path.len-1; i >= 0; i--) {
@@ -5679,6 +5761,9 @@ gb_internal bool parse_file(Parser *p, AstFile *f) {
 						if (!parse_build_tag(tok, lc)) {
 						if (!parse_build_tag(tok, lc)) {
 							return false;
 							return false;
 						}
 						}
+					} else if (string_starts_with(lc, str_lit("+vet"))) {
+						f->vet_flags = parse_vet_tag(tok, lc);
+						f->vet_flags_set = true;
 					} else if (string_starts_with(lc, str_lit("+ignore"))) {
 					} else if (string_starts_with(lc, str_lit("+ignore"))) {
 							return false;
 							return false;
 					} else if (string_starts_with(lc, str_lit("+private"))) {
 					} else if (string_starts_with(lc, str_lit("+private"))) {
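
The parse_vet_tag helper above recognizes the same names as get_vet_flag_from_name (unused, shadowing, using-stmt, using-param, extra), with a leading '!' subtracting a flag. A hedged sketch of the resulting file tag, with a placeholder package:

//+vet shadowing !using-stmt
package vet_tag_example

// With the tag above, only the shadowing check runs for this file,
// overriding whatever -vet flags were passed on the command line.
// A bare //+vet enables the standard set (unused, shadowing, using-stmt),
// while a tag with only negated names, such as //+vet !using-stmt,
// subtracts from the command-line flags instead.
main :: proc() {
}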

+ 2 - 0
src/parser.hpp

@@ -104,6 +104,8 @@ struct AstFile {
 	Token        package_token;
 	Token        package_token;
 	String       package_name;
 	String       package_name;
 
 
+	u64          vet_flags;
+	bool         vet_flags_set;
 
 
 	// >= 0: In Expression
 	// >= 0: In Expression
 	// <  0: In Control Clause
 	// <  0: In Control Clause

+ 14 - 20
tests/core/encoding/hxa/test_core_hxa.odin

@@ -21,16 +21,13 @@ main :: proc() {
 
 
 @test
 @test
 test_read :: proc(t: ^testing.T) {
 test_read :: proc(t: ^testing.T) {
-
-	using hxa
-
 	filename := tc.get_data_path(t, TEAPOT_PATH)
 	filename := tc.get_data_path(t, TEAPOT_PATH)
 	defer delete(filename)
 	defer delete(filename)
 
 
-	file, err := read_from_file(filename)
+	file, err := hxa.read_from_file(filename)
 	e :: hxa.Read_Error.None
 	e :: hxa.Read_Error.None
 	tc.expect(t, err == e, fmt.tprintf("%v: read_from_file(%v) -> %v != %v", #procedure, filename, err, e))
 	tc.expect(t, err == e, fmt.tprintf("%v: read_from_file(%v) -> %v != %v", #procedure, filename, err, e))
-	defer file_destroy(file)
+	defer hxa.file_destroy(file)
 
 
 	/* Header */
 	/* Header */
 	tc.expect(t, file.magic_number == 0x417848, fmt.tprintf("%v: file.magic_number %v != %v",
 	tc.expect(t, file.magic_number == 0x417848, fmt.tprintf("%v: file.magic_number %v != %v",
@@ -134,38 +131,35 @@ test_read :: proc(t: ^testing.T) {
 
 
 @test
 @test
 test_write :: proc(t: ^testing.T) {
 test_write :: proc(t: ^testing.T) {
-
-	using hxa
-
-	n1 :Node
+	n1: hxa.Node
 
 
 	n1_m1_value := []f64le{0.4, -1.23, 2341.6, -333.333}
 	n1_m1_value := []f64le{0.4, -1.23, 2341.6, -333.333}
-	n1_m1 := Meta{"m1", n1_m1_value}
+	n1_m1 := hxa.Meta{"m1", n1_m1_value}
 
 
-	n1.meta_data = []Meta{n1_m1}
+	n1.meta_data = []hxa.Meta{n1_m1}
 
 
-	n1_l1 := Layer{"l1", 2, []f32le{32.1, -41.3}}
-	n1_l2 := Layer{"l2", 3, []f64le{0.64, 1.64, -2.64}}
+	n1_l1 := hxa.Layer{"l1", 2, []f32le{32.1, -41.3}}
+	n1_l2 := hxa.Layer{"l2", 3, []f64le{0.64, 1.64, -2.64}}
 
 
-	n1_content := Node_Image{Image_Type.Image_1D, [3]u32le{1, 1, 2}, Layer_Stack{n1_l1, n1_l2}} 
+	n1_content := hxa.Node_Image{.Image_1D, [3]u32le{1, 1, 2}, hxa.Layer_Stack{n1_l1, n1_l2}}
 
 
 	n1.content = n1_content
 	n1.content = n1_content
 
 
-	w_file :File
-	w_file.nodes = []Node{n1}
+	w_file: hxa.File
+	w_file.nodes = []hxa.Node{n1}
 
 
-	required_size := required_write_size(w_file)
+	required_size := hxa.required_write_size(w_file)
 	buf := make([]u8, required_size)
 	buf := make([]u8, required_size)
 
 
-	n, write_err := write(buf, w_file)
+	n, write_err := hxa.write(buf, w_file)
 	write_e :: hxa.Write_Error.None
 	write_e :: hxa.Write_Error.None
 	tc.expect(t, write_err == write_e, fmt.tprintf("%v: write_err %v != %v", #procedure, write_err, write_e))
 	tc.expect(t, write_err == write_e, fmt.tprintf("%v: write_err %v != %v", #procedure, write_err, write_e))
 	tc.expect(t, n == required_size, fmt.tprintf("%v: n %v != %v", #procedure, n, required_size))
 	tc.expect(t, n == required_size, fmt.tprintf("%v: n %v != %v", #procedure, n, required_size))
 
 
-	file, read_err := read(buf)
+	file, read_err := hxa.read(buf)
 	read_e :: hxa.Read_Error.None
 	read_e :: hxa.Read_Error.None
 	tc.expect(t, read_err == read_e, fmt.tprintf("%v: read_err %v != %v", #procedure, read_err, read_e))
 	tc.expect(t, read_err == read_e, fmt.tprintf("%v: read_err %v != %v", #procedure, read_err, read_e))
-	defer file_destroy(file)
+	defer hxa.file_destroy(file)
 
 
 	delete(buf)
 	delete(buf)
 
 

+ 33 - 36
tests/core/encoding/xml/test_core_xml.odin

@@ -47,7 +47,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "恥ずべきフクロウ",
 			expected_doctype = "恥ずべきフクロウ",
 		},
 		},
-		crc32     = 0x30d82264,
+		crc32     = 0xe9b62f03,
 	},
 	},
 
 
 	{
 	{
@@ -62,7 +62,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "恥ずべきフクロウ",
 			expected_doctype = "恥ずべきフクロウ",
 		},
 		},
-		crc32     = 0xad31d8e8,
+		crc32     = 0x9c2643ed,
 	},
 	},
 
 
 	{
 	{
@@ -77,7 +77,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "TS",
 			expected_doctype = "TS",
 		},
 		},
-		crc32     = 0x7bce2630,
+		crc32     = 0x859b7443,
 	},
 	},
 
 
 	{
 	{
@@ -92,7 +92,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "xliff",
 			expected_doctype = "xliff",
 		},
 		},
-		crc32     = 0x43f19d61,
+		crc32     = 0x3deaf329,
 	},
 	},
 
 
 	{
 	{
@@ -107,7 +107,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "xliff",
 			expected_doctype = "xliff",
 		},
 		},
-		crc32     = 0x961e7635,
+		crc32     = 0x0c55e287,
 	},
 	},
 
 
 	{
 	{
@@ -118,7 +118,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "html",
 			expected_doctype = "html",
 		},
 		},
-		crc32     = 0x573c1033,
+		crc32     = 0x05373317,
 	},
 	},
 
 
 	{
 	{
@@ -129,7 +129,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "html",
 			expected_doctype = "html",
 		},
 		},
-		crc32     = 0x82588917,
+		crc32     = 0x3b6d4a90,
 	},
 	},
 
 
 	{
 	{
@@ -140,7 +140,7 @@ TESTS :: []TEST{
 			},
 			},
 			expected_doctype = "html",
 			expected_doctype = "html",
 		},
 		},
-		crc32     = 0x5e74d8a6,
+		crc32     = 0x5be2ffdc,
 	},
 	},
 
 
 	/*
 	/*
@@ -170,7 +170,7 @@ TESTS :: []TEST{
 			expected_doctype = "",
 			expected_doctype = "",
 		},
 		},
 		err       = .None,
 		err       = .None,
-		crc32     = 0xcaa042b9,
+		crc32     = 0x420dbac5,
 	},
 	},
 }
 }
 
 
@@ -214,43 +214,40 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) {
 	*/
 	*/
 	print :: proc(writer: io.Writer, doc: ^xml.Document) -> (written: int, err: io.Error) {
 	print :: proc(writer: io.Writer, doc: ^xml.Document) -> (written: int, err: io.Error) {
 		if doc == nil { return }
 		if doc == nil { return }
-		using fmt
 
 
-		written += wprintf(writer, "[XML Prolog]\n")
+		written += fmt.wprintf(writer, "[XML Prolog]\n")
 
 
 		for attr in doc.prologue {
 		for attr in doc.prologue {
-			written += wprintf(writer, "\t%v: %v\n", attr.key, attr.val)
+			written += fmt.wprintf(writer, "\t%v: %v\n", attr.key, attr.val)
 		}
 		}
 
 
-		written += wprintf(writer, "[Encoding] %v\n", doc.encoding)
+		written += fmt.wprintf(writer, "[Encoding] %v\n", doc.encoding)
 
 
 		if len(doc.doctype.ident) > 0 {
 		if len(doc.doctype.ident) > 0 {
-			written += wprintf(writer, "[DOCTYPE]  %v\n", doc.doctype.ident)
+			written += fmt.wprintf(writer, "[DOCTYPE]  %v\n", doc.doctype.ident)
 
 
 			if len(doc.doctype.rest) > 0 {
 			if len(doc.doctype.rest) > 0 {
-			 	wprintf(writer, "\t%v\n", doc.doctype.rest)
+			 	fmt.wprintf(writer, "\t%v\n", doc.doctype.rest)
 			}
 			}
 		}
 		}
 
 
 		for comment in doc.comments {
 		for comment in doc.comments {
-			written += wprintf(writer, "[Pre-root comment]  %v\n", comment)
+			written += fmt.wprintf(writer, "[Pre-root comment]  %v\n", comment)
 		}
 		}
 
 
 		if doc.element_count > 0 {
 		if doc.element_count > 0 {
-		 	wprintln(writer, " --- ")
+		 	fmt.wprintln(writer, " --- ")
 		 	print_element(writer, doc, 0)
 		 	print_element(writer, doc, 0)
-		 	wprintln(writer, " --- ")
+		 	fmt.wprintln(writer, " --- ")
 		 }
 		 }
 
 
 		return written, .None
 		return written, .None
 	}
 	}
 
 
 	print_element :: proc(writer: io.Writer, doc: ^xml.Document, element_id: xml.Element_ID, indent := 0) -> (written: int, err: io.Error) {
 	print_element :: proc(writer: io.Writer, doc: ^xml.Document, element_id: xml.Element_ID, indent := 0) -> (written: int, err: io.Error) {
-		using fmt
-
 		tab :: proc(writer: io.Writer, indent: int) {
 		tab :: proc(writer: io.Writer, indent: int) {
 			for _ in 0..=indent {
 			for _ in 0..=indent {
-				wprintf(writer, "\t")
+				fmt.wprintf(writer, "\t")
 			}
 			}
 		}
 		}
 
 
@@ -259,22 +256,24 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) {
 		element := doc.elements[element_id]
 		element := doc.elements[element_id]
 
 
 		if element.kind == .Element {
 		if element.kind == .Element {
-			wprintf(writer, "<%v>\n", element.ident)
-			if len(element.value) > 0 {
-				tab(writer, indent + 1)
-				wprintf(writer, "[Value] %v\n", element.value)
+			fmt.wprintf(writer, "<%v>\n", element.ident)
+
+			for value in element.value {
+				switch v in value {
+				case string:
+					tab(writer, indent + 1)
+					fmt.wprintf(writer, "[Value] %v\n", v)
+				case xml.Element_ID:
+					print_element(writer, doc, v, indent + 1)
+				}
 			}
 			}
 
 
 			for attr in element.attribs {
 			for attr in element.attribs {
 				tab(writer, indent + 1)
 				tab(writer, indent + 1)
-				wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val)
-			}
-
-			for child in element.children {
-				print_element(writer, doc, child, indent + 1)
+				fmt.wprintf(writer, "[Attr] %v: %v\n", attr.key, attr.val)
 			}
 			}
 		} else if element.kind == .Comment {
 		} else if element.kind == .Comment {
-			wprintf(writer, "[COMMENT] %v\n", element.value)
+			fmt.wprintf(writer, "[COMMENT] %v\n", element.value)
 		}
 		}
 
 
 		return written, .None
 		return written, .None
@@ -289,8 +288,6 @@ doc_to_string :: proc(doc: ^xml.Document) -> (result: string) {
 
 
 @test
 @test
 run_tests :: proc(t: ^testing.T) {
 run_tests :: proc(t: ^testing.T) {
-	using fmt
-
 	for test in TESTS {
 	for test in TESTS {
 		path := test_file_path(test.filename)
 		path := test_file_path(test.filename)
 		log(t, fmt.tprintf("Trying to parse %v", path))
 		log(t, fmt.tprintf("Trying to parse %v", path))
@@ -305,11 +302,11 @@ run_tests :: proc(t: ^testing.T) {
 		crc32 := hash.crc32(tree_bytes)
 		crc32 := hash.crc32(tree_bytes)
 
 
 		failed := err != test.err
 		failed := err != test.err
-		err_msg := tprintf("Expected return value %v, got %v", test.err, err)
+		err_msg := fmt.tprintf("Expected return value %v, got %v", test.err, err)
 		expect(t, err == test.err, err_msg)
 		expect(t, err == test.err, err_msg)
 
 
 		failed |= crc32 != test.crc32
 		failed |= crc32 != test.crc32
-		err_msg  = tprintf("Expected CRC 0x%08x, got 0x%08x, with options %v", test.crc32, crc32, test.options)
+		err_msg  = fmt.tprintf("Expected CRC 0x%08x, got 0x%08x, with options %v", test.crc32, crc32, test.options)
 		expect(t, crc32 == test.crc32, err_msg)
 		expect(t, crc32 == test.crc32, err_msg)
 
 
 		if failed {
 		if failed {
@@ -317,7 +314,7 @@ run_tests :: proc(t: ^testing.T) {
 				Don't fully print big trees.
 				Don't fully print big trees.
 			*/
 			*/
 			tree_string = tree_string[:min(2_048, len(tree_string))]
 			tree_string = tree_string[:min(2_048, len(tree_string))]
-			println(tree_string)
+			fmt.println(tree_string)
 		}
 		}
 	}
 	}
 }
 }

+ 4 - 10
tests/core/math/linalg/glsl/test_linalg_glsl_math.odin

@@ -22,9 +22,6 @@ main :: proc() {
 
 
 @test
 @test
 test_fract_f32 :: proc(t: ^testing.T) {
 test_fract_f32 :: proc(t: ^testing.T) {
-
-	using math
-
 	r: f32
 	r: f32
 
 
 	Datum :: struct {
 	Datum :: struct {
@@ -35,8 +32,8 @@ test_fract_f32 :: proc(t: ^testing.T) {
 	@static data := []Datum{
 	@static data := []Datum{
 		{ 0, 10.5, 0.5 }, // Issue #1574 fract in linalg/glm is broken
 		{ 0, 10.5, 0.5 }, // Issue #1574 fract in linalg/glm is broken
 		{ 1, -10.5, -0.5 },
 		{ 1, -10.5, -0.5 },
-		{ 2, F32_MIN, F32_MIN }, // 0x1p-126
-		{ 3, -F32_MIN, -F32_MIN },
+		{ 2, math.F32_MIN, math.F32_MIN }, // 0x1p-126
+		{ 3, -math.F32_MIN, -math.F32_MIN },
 		{ 4, 0.0, 0.0 },
 		{ 4, 0.0, 0.0 },
 		{ 5, -0.0, -0.0 },
 		{ 5, -0.0, -0.0 },
 		{ 6, 1, 0.0 },
 		{ 6, 1, 0.0 },
@@ -54,9 +51,6 @@ test_fract_f32 :: proc(t: ^testing.T) {
 
 
 @test
 @test
 test_fract_f64 :: proc(t: ^testing.T) {
 test_fract_f64 :: proc(t: ^testing.T) {
-
-	using math
-
 	r: f64
 	r: f64
 
 
 	Datum :: struct {
 	Datum :: struct {
@@ -67,8 +61,8 @@ test_fract_f64 :: proc(t: ^testing.T) {
 	@static data := []Datum{
 	@static data := []Datum{
 		{ 0, 10.5, 0.5 }, // Issue #1574 fract in linalg/glm is broken
 		{ 0, 10.5, 0.5 }, // Issue #1574 fract in linalg/glm is broken
 		{ 1, -10.5, -0.5 },
 		{ 1, -10.5, -0.5 },
-		{ 2, F64_MIN, F64_MIN }, // 0x1p-1022
-		{ 3, -F64_MIN, -F64_MIN },
+		{ 2, math.F64_MIN, math.F64_MIN }, // 0x1p-1022
+		{ 3, -math.F64_MIN, -math.F64_MIN },
 		{ 4, 0.0, 0.0 },
 		{ 4, 0.0, 0.0 },
 		{ 5, -0.0, -0.0 },
 		{ 5, -0.0, -0.0 },
 		{ 6, 1, 0.0 },
 		{ 6, 1, 0.0 },

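For context on the fract expectations above: the interesting rows are the math.F32_MIN ones, where the fractional part of the smallest positive normal is the value itself because its truncated part is zero. The table is consistent with a truncation-based fract; purely as an illustration of that reading (not necessarily how core:math/linalg/glsl implements it):

	package fract_example

	import "core:math"

	// Reference helper matching the table's expectations:
	// fract_ref(10.5) == 0.5, fract_ref(-10.5) == -0.5, and
	// fract_ref(math.F32_MIN) == math.F32_MIN since trunc_f32(math.F32_MIN) == 0.
	fract_ref :: proc(x: f32) -> f32 {
		return x - math.trunc_f32(x)
	}
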
+ 73 - 94
tests/core/math/test_core_math.odin

@@ -43,11 +43,7 @@ main :: proc() {
 
 
 @test
 @test
 test_classify_f16 :: proc(t: ^testing.T) {
 test_classify_f16 :: proc(t: ^testing.T) {
-
-	using math
-	using Float_Class
-
-	r: Float_Class
+	r: math.Float_Class
 
 
 	Datum :: struct {
 	Datum :: struct {
 		i: int,
 		i: int,
@@ -55,38 +51,34 @@ test_classify_f16 :: proc(t: ^testing.T) {
 		e: math.Float_Class,
 		e: math.Float_Class,
 	}
 	}
 	@static data := []Datum{
 	@static data := []Datum{
-		{ 0, 1.2, Normal },
-		{ 1, 0h0001, Subnormal },
-		{ 2, 0.0, Zero },
-		{ 3, -0.0, Neg_Zero },
-		{ 4, SNAN_F16, NaN },
-		{ 5, QNAN_F16, NaN },
-		{ 6, INF_F16, Inf },
-		{ 7, NEG_INF_F16, Neg_Inf },
+		{ 0, 1.2, .Normal },
+		{ 1, 0h0001, .Subnormal },
+		{ 2, 0.0, .Zero },
+		{ 3, -0.0, .Neg_Zero },
+		{ 4, math.SNAN_F16, .NaN },
+		{ 5, math.QNAN_F16, .NaN },
+		{ 6, math.INF_F16, .Inf },
+		{ 7, math.NEG_INF_F16, .Neg_Inf },
 	}
 	}
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i)
 		assert(i == d.i)
-		r = classify_f16(d.v)
+		r = math.classify_f16(d.v)
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e))
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e))
 	}
 	}
 
 
 	/* Check all subnormals (exponent 0, 10-bit significand non-zero) */
 	/* Check all subnormals (exponent 0, 10-bit significand non-zero) */
-	for i :u16 = 1; i < 0x400; i += 1 {
-		v :f16 = transmute(f16)i
-		r = classify_f16(v)
-		e :Float_Class: Subnormal
+	for i in u16(1)..<0x400 {
+		v := transmute(f16)i
+		r = math.classify_f16(v)
+		e :: math.Float_Class.Subnormal
 		tc.expect(t, r == e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, v, r, e))
 		tc.expect(t, r == e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, v, r, e))
 	}
 	}
 }
 }
 
 
 @test
 @test
 test_classify_f32 :: proc(t: ^testing.T) {
 test_classify_f32 :: proc(t: ^testing.T) {
-
-	using math
-	using Float_Class
-
-	r: Float_Class
+	r: math.Float_Class
 
 
 	Datum :: struct {
 	Datum :: struct {
 		i: int,
 		i: int,
@@ -94,30 +86,26 @@ test_classify_f32 :: proc(t: ^testing.T) {
 		e: math.Float_Class,
 		e: math.Float_Class,
 	}
 	}
 	@static data := []Datum{
 	@static data := []Datum{
-		{ 0, 1.2, Normal },
-		{ 1, 0h0000_0001, Subnormal },
-		{ 2, 0.0, Zero },
-		{ 3, -0.0, Neg_Zero },
-		{ 4, SNAN_F32, NaN },
-		{ 5, QNAN_F32, NaN },
-		{ 6, INF_F32, Inf },
-		{ 7, NEG_INF_F32, Neg_Inf },
+		{ 0, 1.2, .Normal },
+		{ 1, 0h0000_0001, .Subnormal },
+		{ 2, 0.0, .Zero },
+		{ 3, -0.0, .Neg_Zero },
+		{ 4, math.SNAN_F32, .NaN },
+		{ 5, math.QNAN_F32, .NaN },
+		{ 6, math.INF_F32, .Inf },
+		{ 7, math.NEG_INF_F32, .Neg_Inf },
 	}
 	}
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i)
 		assert(i == d.i)
-		r = classify_f32(d.v)
+		r = math.classify_f32(d.v)
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e))
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e))
 	}
 	}
 }
 }
 
 
 @test
 @test
 test_classify_f64 :: proc(t: ^testing.T) {
 test_classify_f64 :: proc(t: ^testing.T) {
-
-	using math
-	using Float_Class
-
-	r: Float_Class
+	r: math.Float_Class
 
 
 	Datum :: struct {
 	Datum :: struct {
 		i: int,
 		i: int,
@@ -125,28 +113,25 @@ test_classify_f64 :: proc(t: ^testing.T) {
 		e: math.Float_Class,
 		e: math.Float_Class,
 	}
 	}
 	@static data := []Datum{
 	@static data := []Datum{
-		{ 0, 1.2, Normal },
-		{ 1, 0h0000_0000_0000_0001, Subnormal },
-		{ 2, 0.0, Zero },
-		{ 3, -0.0, Neg_Zero },
-		{ 4, SNAN_F64, NaN },
-		{ 5, QNAN_F64, NaN },
-		{ 6, INF_F64, Inf },
-		{ 7, NEG_INF_F64, Neg_Inf },
+		{ 0, 1.2, .Normal },
+		{ 1, 0h0000_0000_0000_0001, .Subnormal },
+		{ 2, 0.0, .Zero },
+		{ 3, -0.0, .Neg_Zero },
+		{ 4, math.SNAN_F64, .NaN },
+		{ 5, math.QNAN_F64, .NaN },
+		{ 6, math.INF_F64, .Inf },
+		{ 7, math.NEG_INF_F64, .Neg_Inf },
 	}
 	}
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i)
 		assert(i == d.i)
-		r = classify_f64(d.v)
+		r = math.classify_f64(d.v)
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e))
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %v != %v", i, #procedure, d.v, r, d.e))
 	}
 	}
 }
 }
 
 
 @test
 @test
 test_trunc_f16 :: proc(t: ^testing.T) {
 test_trunc_f16 :: proc(t: ^testing.T) {
-
-	using math
-
 	r, v: f16
 	r, v: f16
 
 
 	Datum :: struct {
 	Datum :: struct {
@@ -158,16 +143,16 @@ test_trunc_f16 :: proc(t: ^testing.T) {
 		{ 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken
 		{ 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken
 		{ 1, -10.5, -10 },
 		{ 1, -10.5, -10 },
 
 
-		{ 2, F16_MAX, F16_MAX },
-		{ 3, -F16_MAX, -F16_MAX },
-		{ 4, F16_MIN, 0.0 },
-		{ 5, -F16_MIN, -0.0 },
+		{ 2, math.F16_MAX, math.F16_MAX },
+		{ 3, -math.F16_MAX, -math.F16_MAX },
+		{ 4, math.F16_MIN, 0.0 },
+		{ 5, -math.F16_MIN, -0.0 },
 		{ 6, 0.0, 0.0 },
 		{ 6, 0.0, 0.0 },
 		{ 7, -0.0, -0.0 },
 		{ 7, -0.0, -0.0 },
 		{ 8, 1, 1 },
 		{ 8, 1, 1 },
 		{ 9, -1, -1 },
 		{ 9, -1, -1 },
-		{ 10, INF_F16, INF_F16 },
-		{ 11, NEG_INF_F16, NEG_INF_F16 },
+		{ 10, math.INF_F16, math.INF_F16 },
+		{ 11, math.NEG_INF_F16, math.NEG_INF_F16 },
 
 
 		/* From https://en.wikipedia.org/wiki/Half-precision_floating-point_format */
 		/* From https://en.wikipedia.org/wiki/Half-precision_floating-point_format */
 		{ 12, 0h3C01, 1 }, // 0x1.004p+0 (smallest > 1)
 		{ 12, 0h3C01, 1 }, // 0x1.004p+0 (smallest > 1)
@@ -185,24 +170,21 @@ test_trunc_f16 :: proc(t: ^testing.T) {
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i)
 		assert(i == d.i)
-		r = trunc_f16(d.v)
+		r = math.trunc_f16(d.v)
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e))
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e))
 	}
 	}
 
 
-	v = SNAN_F16
-	r = trunc_f16(v)
-	tc.expect(t, is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
+	v = math.SNAN_F16
+	r = math.trunc_f16(v)
+	tc.expect(t, math.is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
 
 
-	v = QNAN_F16
-	r = trunc_f16(v)
-	tc.expect(t, is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
+	v = math.QNAN_F16
+	r = math.trunc_f16(v)
+	tc.expect(t, math.is_nan_f16(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
 }
 }
 
 
 @test
 @test
 test_trunc_f32 :: proc(t: ^testing.T) {
 test_trunc_f32 :: proc(t: ^testing.T) {
-
-	using math
-
 	r, v: f32
 	r, v: f32
 
 
 	Datum :: struct {
 	Datum :: struct {
@@ -214,16 +196,16 @@ test_trunc_f32 :: proc(t: ^testing.T) {
 		{ 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken
 		{ 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken
 		{ 1, -10.5, -10 },
 		{ 1, -10.5, -10 },
 
 
-		{ 2, F32_MAX, F32_MAX },
-		{ 3, -F32_MAX, -F32_MAX },
-		{ 4, F32_MIN, 0.0 },
-		{ 5, -F32_MIN, -0.0 },
+		{ 2, math.F32_MAX, math.F32_MAX },
+		{ 3, -math.F32_MAX, -math.F32_MAX },
+		{ 4, math.F32_MIN, 0.0 },
+		{ 5, -math.F32_MIN, -0.0 },
 		{ 6, 0.0, 0.0 },
 		{ 6, 0.0, 0.0 },
 		{ 7, -0.0, -0.0 },
 		{ 7, -0.0, -0.0 },
 		{ 8, 1, 1 },
 		{ 8, 1, 1 },
 		{ 9, -1, -1 },
 		{ 9, -1, -1 },
-		{ 10, INF_F32, INF_F32 },
-		{ 11, NEG_INF_F32, NEG_INF_F32 },
+		{ 10, math.INF_F32, math.INF_F32 },
+		{ 11, math.NEG_INF_F32, math.NEG_INF_F32 },
 
 
 		/* From https://en.wikipedia.org/wiki/Single-precision_floating-point_format */
 		/* From https://en.wikipedia.org/wiki/Single-precision_floating-point_format */
 		{ 12, 0h3F80_0001, 1 }, // 0x1.000002p+0 (smallest > 1)
 		{ 12, 0h3F80_0001, 1 }, // 0x1.000002p+0 (smallest > 1)
@@ -250,24 +232,21 @@ test_trunc_f32 :: proc(t: ^testing.T) {
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i)
 		assert(i == d.i)
-		r = trunc_f32(d.v)
+		r = math.trunc_f32(d.v)
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e))
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e))
 	}
 	}
 
 
-	v = SNAN_F32
-	r = trunc_f32(v)
-	tc.expect(t, is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
+	v = math.SNAN_F32
+	r = math.trunc_f32(v)
+	tc.expect(t, math.is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
 
 
-	v = QNAN_F32
-	r = trunc_f32(v)
-	tc.expect(t, is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
+	v = math.QNAN_F32
+	r = math.trunc_f32(v)
+	tc.expect(t, math.is_nan_f32(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
 }
 }
 
 
 @test
 @test
 test_trunc_f64 :: proc(t: ^testing.T) {
 test_trunc_f64 :: proc(t: ^testing.T) {
-
-	using math
-
 	r, v: f64
 	r, v: f64
 
 
 	Datum :: struct {
 	Datum :: struct {
@@ -279,16 +258,16 @@ test_trunc_f64 :: proc(t: ^testing.T) {
 		{ 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken
 		{ 0, 10.5, 10 }, // Issue #1574 fract in linalg/glm is broken
 		{ 1, -10.5, -10 },
 		{ 1, -10.5, -10 },
 
 
-		{ 2, F64_MAX, F64_MAX },
-		{ 3, -F64_MAX, -F64_MAX },
-		{ 4, F64_MIN, 0.0 },
-		{ 5, -F64_MIN, -0.0 },
+		{ 2, math.F64_MAX, math.F64_MAX },
+		{ 3, -math.F64_MAX, -math.F64_MAX },
+		{ 4, math.F64_MIN, 0.0 },
+		{ 5, -math.F64_MIN, -0.0 },
 		{ 6, 0.0, 0.0 },
 		{ 6, 0.0, 0.0 },
 		{ 7, -0.0, -0.0 },
 		{ 7, -0.0, -0.0 },
 		{ 8, 1, 1 },
 		{ 8, 1, 1 },
 		{ 9, -1, -1 },
 		{ 9, -1, -1 },
-		{ 10, INF_F64, INF_F64 },
-		{ 11, NEG_INF_F64, NEG_INF_F64 },
+		{ 10, math.INF_F64, math.INF_F64 },
+		{ 11, math.NEG_INF_F64, math.NEG_INF_F64 },
 
 
 		/* From https://en.wikipedia.org/wiki/Double-precision_floating-point_format */
 		/* From https://en.wikipedia.org/wiki/Double-precision_floating-point_format */
 		{ 12, 0h3FF0_0000_0000_0001, 1 }, // 0x1.0000000000001p+0 (smallest > 1)
 		{ 12, 0h3FF0_0000_0000_0001, 1 }, // 0x1.0000000000001p+0 (smallest > 1)
@@ -315,17 +294,17 @@ test_trunc_f64 :: proc(t: ^testing.T) {
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i)
 		assert(i == d.i)
-		r = trunc_f64(d.v)
+		r = math.trunc_f64(d.v)
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e))
 		tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(%h) -> %h != %h", i, #procedure, d.v, r, d.e))
 	}
 	}
 
 
-	v = SNAN_F64
-	r = trunc_f64(v)
-	tc.expect(t, is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
+	v = math.SNAN_F64
+	r = math.trunc_f64(v)
+	tc.expect(t, math.is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
 
 
-	v = QNAN_F64
-	r = trunc_f64(v)
-	tc.expect(t, is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
+	v = math.QNAN_F64
+	r = math.trunc_f64(v)
+	tc.expect(t, math.is_nan_f64(r), fmt.tprintf("%s(%f) -> %f != NaN", #procedure, v, r))
 }
 }
 
 
 
 

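The subnormal sweep in test_classify_f16 above leans on the IEEE 754 binary16 layout: 1 sign bit, 5 exponent bits, 10 significand bits, so bit patterns 0x0001 through 0x03FF all have a zero exponent field with a non-zero significand and must classify as subnormal. The same check as a standalone sketch:

	package f16_subnormal_example

	import "core:fmt"
	import "core:math"

	check_f16_subnormals :: proc() {
		for bits in u16(1)..<0x400 {
			v := transmute(f16)bits // exponent field 0, significand non-zero
			c := math.classify_f16(v)
			if c != .Subnormal {
				fmt.printf("bit pattern 0x%04x misclassified as %v\n", bits, c)
			}
		}
	}
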
+ 6 - 12
tests/core/path/filepath/test_core_filepath.odin

@@ -22,9 +22,6 @@ main :: proc() {
 
 
 @test
 @test
 test_split_list_windows :: proc(t: ^testing.T) {
 test_split_list_windows :: proc(t: ^testing.T) {
-
-	using filepath
-
 	Datum :: struct {
 	Datum :: struct {
 		i: int,
 		i: int,
 		v: string,
 		v: string,
@@ -43,7 +40,7 @@ test_split_list_windows :: proc(t: ^testing.T) {
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i, fmt.tprintf("wrong data index: i %d != d.i %d\n", i, d.i))
 		assert(i == d.i, fmt.tprintf("wrong data index: i %d != d.i %d\n", i, d.i))
-		r := split_list(d.v)
+		r := filepath.split_list(d.v)
 		defer delete(r)
 		defer delete(r)
 		tc.expect(t, len(r) == len(d.e), fmt.tprintf("i:%d %s(%s) len(r) %d != len(d.e) %d",
 		tc.expect(t, len(r) == len(d.e), fmt.tprintf("i:%d %s(%s) len(r) %d != len(d.e) %d",
 													 i, #procedure, d.v, len(r), len(d.e)))
 													 i, #procedure, d.v, len(r), len(d.e)))
@@ -57,12 +54,12 @@ test_split_list_windows :: proc(t: ^testing.T) {
 
 
 	{
 	{
 		v := ""
 		v := ""
-		r := split_list(v)
+		r := filepath.split_list(v)
 		tc.expect(t, r == nil, fmt.tprintf("%s(%s) -> %v != nil", #procedure, v, r))
 		tc.expect(t, r == nil, fmt.tprintf("%s(%s) -> %v != nil", #procedure, v, r))
 	}
 	}
 	{
 	{
 		v := "a"
 		v := "a"
-		r := split_list(v)
+		r := filepath.split_list(v)
 		defer delete(r)
 		defer delete(r)
 		tc.expect(t, len(r) == 1, fmt.tprintf("%s(%s) len(r) %d != 1", #procedure, v, len(r)))
 		tc.expect(t, len(r) == 1, fmt.tprintf("%s(%s) len(r) %d != 1", #procedure, v, len(r)))
 		if len(r) == 1 {
 		if len(r) == 1 {
@@ -73,9 +70,6 @@ test_split_list_windows :: proc(t: ^testing.T) {
 
 
 @test
 @test
 test_split_list_unix :: proc(t: ^testing.T) {
 test_split_list_unix :: proc(t: ^testing.T) {
-
-	using filepath
-
 	Datum :: struct {
 	Datum :: struct {
 		i: int,
 		i: int,
 		v: string,
 		v: string,
@@ -94,7 +88,7 @@ test_split_list_unix :: proc(t: ^testing.T) {
 
 
 	for d, i in data {
 	for d, i in data {
 		assert(i == d.i, fmt.tprintf("wrong data index: i %d != d.i %d\n", i, d.i))
 		assert(i == d.i, fmt.tprintf("wrong data index: i %d != d.i %d\n", i, d.i))
-		r := split_list(d.v)
+		r := filepath.split_list(d.v)
 		defer delete(r)
 		defer delete(r)
 		tc.expect(t, len(r) == len(d.e), fmt.tprintf("i:%d %s(%s) len(r) %d != len(d.e) %d",
 		tc.expect(t, len(r) == len(d.e), fmt.tprintf("i:%d %s(%s) len(r) %d != len(d.e) %d",
 													 i, #procedure, d.v, len(r), len(d.e)))
 													 i, #procedure, d.v, len(r), len(d.e)))
@@ -108,12 +102,12 @@ test_split_list_unix :: proc(t: ^testing.T) {
 
 
 	{
 	{
 		v := ""
 		v := ""
-		r := split_list(v)
+		r := filepath.split_list(v)
 		tc.expect(t, r == nil, fmt.tprintf("%s(%s) -> %v != nil", #procedure, v, r))
 		tc.expect(t, r == nil, fmt.tprintf("%s(%s) -> %v != nil", #procedure, v, r))
 	}
 	}
 	{
 	{
 		v := "a"
 		v := "a"
-		r := split_list(v)
+		r := filepath.split_list(v)
 		defer delete(r)
 		defer delete(r)
 		tc.expect(t, len(r) == 1, fmt.tprintf("%s(%s) len(r) %d != 1", #procedure, v, len(r)))
 		tc.expect(t, len(r) == 1, fmt.tprintf("%s(%s) len(r) %d != 1", #procedure, v, len(r)))
 		if len(r) == 1 {
 		if len(r) == 1 {

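The filepath hunks above are pure qualification: drop `using filepath`, call `filepath.split_list` directly. The procedure splits a PATH-style list on the platform's list separator (";" on Windows, ":" elsewhere), returns nil for an empty string, and allocates the resulting slice, which is why the tests `defer delete(r)`. A minimal usage sketch (the example path list is made up):

	package split_list_example

	import "core:fmt"
	import "core:path/filepath"

	main :: proc() {
		// On Windows the same call would expect ";" between entries.
		paths := filepath.split_list("/usr/local/bin:/usr/bin")
		defer delete(paths)
		for p in paths {
			fmt.println(p)
		}
	}
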
+ 16 - 20
tests/core/reflect/test_core_reflect.odin

@@ -19,8 +19,6 @@ main :: proc() {
 
 
 @test
 @test
 test_as_u64 :: proc(t: ^testing.T) {
 test_as_u64 :: proc(t: ^testing.T) {
-	using reflect
-
 	{
 	{
 		/* i8 */
 		/* i8 */
 		Datum :: struct { i: int, v: i8, e: u64 }
 		Datum :: struct { i: int, v: i8, e: u64 }
@@ -32,7 +30,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i8 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i8 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i8 %v) -> %v (0x%X) != %v (0x%X)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i8 %v) -> %v (0x%X) != %v (0x%X)\n",
 												i, #procedure, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, r, r, d.e, d.e))
@@ -49,7 +47,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i16 %v) -> %v (0x%X) != %v (0x%X)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i16 %v) -> %v (0x%X) != %v (0x%X)\n",
 												i, #procedure, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, r, r, d.e, d.e))
@@ -66,7 +64,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i32 %v) -> %v (0x%X) != %v (0x%X)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i32 %v) -> %v (0x%X) != %v (0x%X)\n",
 												i, #procedure, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, r, r, d.e, d.e))
@@ -83,7 +81,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i64 %v) -> %v (0x%X) != %v (0x%X)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i64 %v) -> %v (0x%X) != %v (0x%X)\n",
 												i, #procedure, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, r, r, d.e, d.e))
@@ -103,7 +101,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i128 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i128 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i128 %v) -> %v (0x%X) != %v (0x%X)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i128 %v) -> %v (0x%X) != %v (0x%X)\n",
 												i, #procedure, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, r, r, d.e, d.e))
@@ -119,7 +117,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f16 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f16 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -133,7 +131,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f32 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f32 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -147,7 +145,7 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_u64(d.v)
+			r, valid := reflect.as_u64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -156,8 +154,6 @@ test_as_u64 :: proc(t: ^testing.T) {
 
 
 @test
 @test
 test_as_f64 :: proc(t: ^testing.T) {
 test_as_f64 :: proc(t: ^testing.T) {
-	using reflect
-
 	{
 	{
 		/* i8 */
 		/* i8 */
 		Datum :: struct { i: int, v: i8, e: f64 }
 		Datum :: struct { i: int, v: i8, e: f64 }
@@ -169,7 +165,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i8 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i8 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i8 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i8 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -185,7 +181,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i16 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i16 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -201,7 +197,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i32 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i32 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -217,7 +213,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}
@@ -234,7 +230,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i128 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(i128 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i128 %v) -> %v (%H) != %v (%H)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(i128 %v) -> %v (%H) != %v (%H)\n",
 												i, #procedure, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, r, r, d.e, d.e))
@@ -250,7 +246,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f16 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f16 %v (%H)) -> %v (%H) != %v (%H)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f16 %v (%H)) -> %v (%H) != %v (%H)\n",
 												i, #procedure, d.v, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, d.v, r, r, d.e, d.e))
@@ -265,7 +261,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f32 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f32 %v (%H)) -> %v (%H) != %v (%H)\n",
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f32 %v (%H)) -> %v (%H) != %v (%H)\n",
 												i, #procedure, d.v, d.v, r, r, d.e, d.e))
 												i, #procedure, d.v, d.v, r, r, d.e, d.e))
@@ -280,7 +276,7 @@ test_as_f64 :: proc(t: ^testing.T) {
 
 
 		for d, i in data {
 		for d, i in data {
 			assert(i == d.i)
 			assert(i == d.i)
-			r, valid := as_f64(d.v)
+			r, valid := reflect.as_f64(d.v)
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, valid, fmt.tprintf("i:%d %s(f64 %v) !valid\n", i, #procedure, d.v))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 			tc.expect(t, r == d.e, fmt.tprintf("i:%d %s(f64 %v) -> %v != %v\n", i, #procedure, d.v, r, d.e))
 		}
 		}

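Likewise for the reflect hunks: `reflect.as_u64` and `reflect.as_f64` accept an `any`, returning the converted value together with a `valid` flag, and the tests check both. A minimal usage sketch:

	package reflect_as_example

	import "core:fmt"
	import "core:reflect"

	main :: proc() {
		v: i16 = 16_384
		u, u_ok := reflect.as_u64(v)
		f, f_ok := reflect.as_f64(v)
		fmt.printf("as_u64 -> %v (valid: %v), as_f64 -> %v (valid: %v)\n", u, u_ok, f, f_ok)
	}
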
+ 2 - 6
tests/core/text/i18n/test_core_text_i18n.odin

@@ -118,8 +118,6 @@ TESTS := []Test_Suite{
 
 
 @test
 @test
 tests :: proc(t: ^testing.T) {
 tests :: proc(t: ^testing.T) {
-	using fmt
-
 	cat: ^i18n.Translation
 	cat: ^i18n.Translation
 	err: i18n.Error
 	err: i18n.Error
 
 
@@ -142,8 +140,6 @@ tests :: proc(t: ^testing.T) {
 }
 }
 
 
 main :: proc() {
 main :: proc() {
-	using fmt
-
 	track: mem.Tracking_Allocator
 	track: mem.Tracking_Allocator
 	mem.tracking_allocator_init(&track, context.allocator)
 	mem.tracking_allocator_init(&track, context.allocator)
 	context.allocator = mem.tracking_allocator(&track)
 	context.allocator = mem.tracking_allocator(&track)
@@ -157,9 +153,9 @@ main :: proc() {
 	}
 	}
 
 
 	if len(track.allocation_map) > 0 {
 	if len(track.allocation_map) > 0 {
-		println()
+		fmt.println()
 		for _, v in track.allocation_map {
 		for _, v in track.allocation_map {
-			printf("%v Leaked %v bytes.\n", v.location, v.size)
+			fmt.printf("%v Leaked %v bytes.\n", v.location, v.size)
 		}
 		}
 	}
 	}
 }
 }

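The i18n test's main above shows the usual core:mem leak-check pattern: wrap context.allocator in a mem.Tracking_Allocator, run the tests, then walk allocation_map for anything left over. Condensed into a standalone sketch:

	package tracking_example

	import "core:fmt"
	import "core:mem"

	main :: proc() {
		track: mem.Tracking_Allocator
		mem.tracking_allocator_init(&track, context.allocator)
		context.allocator = mem.tracking_allocator(&track)

		_ = make([]int, 16) // deliberately never freed so the report below fires

		for _, entry in track.allocation_map {
			fmt.printf("%v Leaked %v bytes.\n", entry.location, entry.size)
		}
	}
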
+ 2 - 2
vendor/darwin/Metal/MetalClasses.odin

@@ -5388,8 +5388,8 @@ Device_newBufferWithLength :: #force_inline proc "c" (self: ^Device, length: NS.
 
 
 @(objc_type=Device, objc_name="newBuffer")
 @(objc_type=Device, objc_name="newBuffer")
 Device_newBuffer :: proc{
 Device_newBuffer :: proc{
-	// Device_newBufferWithBytes,
-	// Device_newBufferWithBytesNoCopy,
+	Device_newBufferWithBytes,
+	Device_newBufferWithBytesNoCopy,
 	Device_newBufferWithSlice,
 	Device_newBufferWithSlice,
 	Device_newBufferWithSliceNoCopy,
 	Device_newBufferWithSliceNoCopy,
 	Device_newBufferWithLength,
 	Device_newBufferWithLength,

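The Metal change just restores two previously commented-out entries in the Device_newBuffer group. In Odin, such a group is an explicit overload set: the call site picks the entry whose signature matches. A toy illustration of the mechanism (all names are made up):

	package proc_group_example

	import "core:fmt"

	scale_int :: proc(v: int, s: int) -> int { return v * s }
	scale_f32 :: proc(v: f32, s: f32) -> f32 { return v * s }

	// Explicit overload set, like Device_newBuffer above.
	scale :: proc{scale_int, scale_f32}

	main :: proc() {
		fmt.println(scale(3, 4))               // dispatches to scale_int
		fmt.println(scale(f32(1.5), f32(2)))   // dispatches to scale_f32
	}
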
+ 1 - 0
vendor/fontstash/fontstash.odin

@@ -1,4 +1,5 @@
 //+build windows, linux, darwin
 //+build windows, linux, darwin
+//+vet !using-param
 package fontstash
 package fontstash
 
 
 import "core:runtime"
 import "core:runtime"
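
And the fontstash tag: `//+vet !using-param` (taken verbatim from the diff) presumably exempts this file from the vet check that flags `using` applied to procedure parameters, a style fontstash relies on. A hypothetical example of the construct that check targets:

	package using_param_example

	Canvas :: struct {
		width, height: int,
	}

	// `using canvas` exposes the struct's fields directly in the body;
	// the using-param vet warning exists because this can hide where
	// width and height actually come from.
	resize :: proc(using canvas: ^Canvas, w, h: int) {
		width  = w
		height = h
	}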