Browse Source

Merge pull request #3620 from Kelimion/deprecate_odin_printer

Deprecate `core:odin/printer` in favor of OLS.
Jeroen van Rijn 1 year ago
parent
commit
d99e1616cf

+ 3 - 0
core/odin/format/deprecated.odin

@@ -0,0 +1,3 @@
+package odin_format
+
+#panic("The format package has been deprecated. Please look at https://github.com/DanielGavin/ols")

+ 0 - 41
core/odin/format/format.odin

@@ -1,41 +0,0 @@
-package odin_format
-
-import "core:odin/printer"
-import "core:odin/parser"
-import "core:odin/ast"
-
-default_style := printer.default_style
-
-simplify :: proc(file: ^ast.File) {
-
-}
-
-format :: proc(filepath: string, source: string, config: printer.Config, parser_flags := parser.Flags{}, allocator := context.allocator) -> (string, bool) {
-	config := config
-
-	pkg := ast.Package {
-		kind = .Normal,
-	}
-
-	file := ast.File {
-		pkg = &pkg,
-		src = source,
-		fullpath = filepath,
-	}
-
-	config.newline_limit      = clamp(config.newline_limit, 0, 16)
-	config.spaces             = clamp(config.spaces, 1, 16)
-	config.align_length_break = clamp(config.align_length_break, 0, 64)
-
-	p := parser.default_parser(parser_flags)
-
-	ok := parser.parse_file(&p, &file)
-
-	if !ok || file.syntax_error_count > 0  {
-		return {}, false
-	}
-
-	prnt := printer.make_printer(config, allocator)
-
-	return printer.print(&prnt, &file), true
-}

+ 3 - 0
core/odin/printer/deprecated.odin

@@ -0,0 +1,3 @@
+package odin_printer
+
+#panic("The printer package has been deprecated. Please look at https://github.com/DanielGavin/ols")

+ 0 - 922
core/odin/printer/printer.odin

@@ -1,922 +0,0 @@
-package odin_printer
-
-import "core:odin/ast"
-import "core:odin/tokenizer"
-import "core:strings"
-import "core:fmt"
-import "core:mem"
-
-Type_Enum :: enum {Line_Comment, Value_Decl, Switch_Stmt, Struct, Assign, Call, Enum, If, For, Proc_Lit}
-
-Line_Type :: bit_set[Type_Enum]
-
-/*
-	Represents an unwrapped line
-*/
-Line :: struct {
-	format_tokens: [dynamic]Format_Token,
-	finalized:     bool,
-	used:          bool,
-	depth:         int,
-	types:         Line_Type, //for performance, so you don't have to verify what types are in it by going through the tokens - might give problems when adding linebreaking
-}
-
-/*
-	Represents a singular token in an unwrapped line
-*/
-Format_Token :: struct {
-	kind:            tokenizer.Token_Kind,
-	text:            string,
-	type:            Type_Enum,
-	spaces_before:   int,
-	parameter_count: int,
-}
-
-Printer :: struct {
-	string_builder:       strings.Builder,
-	config:               Config,
-	depth:                int, //the indentation depth
-	comments:             [dynamic]^ast.Comment_Group,
-	latest_comment_index: int,
-	allocator:            mem.Allocator,
-	file:                 ^ast.File,
-	source_position:      tokenizer.Pos,
-	last_source_position: tokenizer.Pos,
-	lines:                [dynamic]Line, //need to look into a better data structure, one that can handle inserting lines rather than appending
-	skip_semicolon:       bool,
-	current_line:         ^Line,
-	current_line_index:   int,
-	last_line_index:      int,
-	last_token:           ^Format_Token,
-	merge_next_token:     bool,
-	space_next_token:     bool,
-	debug:                bool,
-}
-
-Config :: struct {
-	spaces:               int,  //Spaces per indentation
-	newline_limit:        int,  //The limit of newlines between statements and declarations.
-	tabs:                 bool, //Enable or disable tabs
-	convert_do:           bool, //Convert all do statements to brace blocks
-	semicolons:           bool, //Enable semicolons
-	split_multiple_stmts: bool,
-	align_switch:         bool,
-	brace_style:          Brace_Style,
-	align_assignments:    bool,
-	align_structs:        bool,
-	align_style:          Alignment_Style,
-	align_enums:          bool,
-	align_length_break:   int,
-	indent_cases:         bool,
-	newline_style:        Newline_Style,
-}
-
-Brace_Style :: enum {
-	_1TBS,
-	Allman,
-	Stroustrup,
-	K_And_R,
-}
-
-Block_Type :: enum {
-	None,
-	If_Stmt,
-	Proc,
-	Generic,
-	Comp_Lit,
-	Switch_Stmt,
-}
-
-Alignment_Style :: enum {
-	Align_On_Type_And_Equals,
-	Align_On_Colon_And_Equals,
-}
-
-Newline_Style :: enum {
-	CRLF,
-	LF,
-}
-
-default_style := Config {
-	spaces               = 4,
-	newline_limit        = 2,
-	convert_do           = false,
-	semicolons           = false,
-	tabs                 = true,
-	brace_style          = ._1TBS,
-	split_multiple_stmts = true,
-	align_assignments    = true,
-	align_style          = .Align_On_Type_And_Equals,
-	indent_cases         = false,
-	align_switch         = true,
-	align_structs        = true,
-	align_enums          = true,
-	newline_style        = .CRLF,
-	align_length_break   = 9,
-}
-
-make_printer :: proc(config: Config, allocator := context.allocator) -> Printer {
-	return {
-		config = config,
-		allocator = allocator,
-		debug = false,
-	}
-}
-
-print :: proc(p: ^Printer, file: ^ast.File) -> string {
-	p.comments = file.comments
-
-	if len(file.decls) > 0 {
-		p.lines = make([dynamic]Line, 0, (file.decls[len(file.decls) - 1].end.line - file.decls[0].pos.line) * 2, context.temp_allocator)
-	}
-
-	set_source_position(p, file.pkg_token.pos)
-
-	p.last_source_position.line = 1
-
-	set_line(p, 0)
-
-	push_generic_token(p, .Package, 0)
-	push_ident_token(p, file.pkg_name, 1)
-
-	for decl in file.decls {
-		visit_decl(p, cast(^ast.Decl)decl)
-	}
-
-	if len(p.comments) > 0 {
-		infinite := p.comments[len(p.comments) - 1].end
-		infinite.offset = 9999999
-		push_comments(p, infinite)
-	}
-
-	fix_lines(p)
-
-	builder := strings.builder_make(0, 5 * mem.Megabyte, p.allocator)
-
-	last_line := 0
-
-	newline: string
-
-	if p.config.newline_style == .LF {
-		newline = "\n"
-	} else {
-		newline = "\r\n"
-	}
-
-	for line, line_index in p.lines {
-		diff_line := line_index - last_line
-
-		for i := 0; i < diff_line; i += 1 {
-			strings.write_string(&builder, newline)
-		}
-
-		if p.config.tabs {
-			for i := 0; i < line.depth; i += 1 {
-				strings.write_byte(&builder, '\t')
-			}
-		} else {
-			for i := 0; i < line.depth * p.config.spaces; i += 1 {
-				strings.write_byte(&builder, ' ')
-			}
-		}
-
-		if p.debug {
-			strings.write_string(&builder, fmt.tprintf("line %v: ", line_index))
-		}
-
-		for format_token in line.format_tokens {
-
-			for i := 0; i < format_token.spaces_before; i += 1 {
-				strings.write_byte(&builder, ' ')
-			}
-
-			strings.write_string(&builder, format_token.text)
-		}
-
-		last_line = line_index
-	}
-
-	strings.write_string(&builder, newline)
-
-	return strings.to_string(builder)
-}
-
-fix_lines :: proc(p: ^Printer) {
-	align_var_decls(p)
-	format_generic(p)
-	align_comments(p) //align them last since they rely on the other alignments
-}
-
-format_value_decl :: proc(p: ^Printer, index: int) {
-
-	eq_found := false
-	eq_token: Format_Token
-	eq_line:  int
-	largest := 0
-
-	found_eq: for line, line_index in p.lines[index:] {
-		for format_token in line.format_tokens {
-
-			largest += len(format_token.text) + format_token.spaces_before
-
-			if format_token.kind == .Eq {
-				eq_token = format_token
-				eq_line = line_index + index
-				eq_found = true
-				break found_eq
-			}
-		}
-	}
-
-	if !eq_found {
-		return
-	}
-
-	align_next := false
-
-	//check to see if there is a binary operator in the last token(this is guaranteed by the ast visit), otherwise it's not multilined
-	for line in p.lines[eq_line:] {
-
-		if len(line.format_tokens) == 0 {
-			break
-		}
-
-		if align_next {
-			line.format_tokens[0].spaces_before = largest + 1
-			align_next = false
-		}
-
-		kind := find_last_token(line.format_tokens).kind
-
-		if tokenizer.Token_Kind.B_Operator_Begin < kind && kind <= tokenizer.Token_Kind.Cmp_Or {
-			align_next = true
-		}
-
-		if !align_next {
-			break
-		}
-	}
-}
-
-find_last_token :: proc(format_tokens: [dynamic]Format_Token) -> Format_Token {
-
-	for i := len(format_tokens) - 1; i >= 0; i -= 1 {
-
-		if format_tokens[i].kind != .Comment {
-			return format_tokens[i]
-		}
-	}
-
-	panic("not possible")
-}
-
-format_assignment :: proc(p: ^Printer, index: int) {
-}
-
-format_call :: proc(p: ^Printer, line_index: int, format_index: int) {
-
-	paren_found := false
-	paren_token:       Format_Token
-	paren_line:        int
-	paren_token_index: int
-	largest := 0
-
-	found_paren: for line, i in p.lines[line_index:] {
-		for format_token, j in line.format_tokens {
-
-			largest += len(format_token.text) + format_token.spaces_before
-
-			if i == 0 && j < format_index {
-				continue
-			}
-
-			if format_token.kind == .Open_Paren && format_token.type == .Call {
-				paren_token = format_token
-				paren_line = line_index + i
-				paren_found = true
-				paren_token_index = j
-				break found_paren
-			}
-		}
-	}
-
-	if !paren_found {
-		panic("Should not be possible")
-	}
-
-	paren_count := 1
-	done        := false
-
-	for line in p.lines[paren_line:] {
-
-		if len(line.format_tokens) == 0 {
-			continue
-		}
-
-		for format_token, i in line.format_tokens {
-
-			if format_token.kind == .Comment {
-				continue
-			}
-
-			if line_index == 0 && i <= paren_token_index {
-				continue
-			}
-
-			if format_token.kind == .Open_Paren {
-				paren_count += 1
-			} else if format_token.kind == .Close_Paren {
-				paren_count -= 1
-			}
-
-			if paren_count == 0 {
-				done = true
-			}
-		}
-
-		if line_index != 0 {
-			line.format_tokens[0].spaces_before = largest
-		}
-
-		if done {
-			return
-		}
-	}
-}
-
-format_keyword_to_brace :: proc(p: ^Printer, line_index: int, format_index: int, keyword: tokenizer.Token_Kind) {
-
-	keyword_found := false
-	keyword_token: Format_Token
-	keyword_line:  int
-
-	largest := 0
-	brace_count := 0
-	done        := false
-
-	found_keyword: for line, i in p.lines[line_index:] {
-		for format_token in line.format_tokens {
-
-			largest += len(format_token.text) + format_token.spaces_before
-
-			if format_token.kind == keyword {
-				keyword_token = format_token
-				keyword_line = line_index + i
-				keyword_found = true
-				break found_keyword
-			}
-		}
-	}
-
-	if !keyword_found {
-		panic("Should not be possible")
-	}
-
-	for line, line_idx in p.lines[keyword_line:] {
-
-		if len(line.format_tokens) == 0 {
-			continue
-		}
-
-		for format_token, i in line.format_tokens {
-
-			if format_token.kind == .Comment {
-				break
-			} else if format_token.kind == .Undef {
-				return
-			}
-
-			if line_idx == 0 && i <= format_index {
-				continue
-			}
-
-			if format_token.kind == .Open_Brace {
-				brace_count += 1
-			} else if format_token.kind == .Close_Brace {
-				brace_count -= 1
-			}
-
-			if brace_count == 1 {
-				done = true
-			}
-		}
-
-		if line_idx != 0 {
-			line.format_tokens[0].spaces_before = largest + 1
-		}
-
-		if done {
-			return
-		}
-	}
-}
-
-format_generic :: proc(p: ^Printer) {
-	next_struct_line := 0
-
-	for line, line_index in p.lines {
-
-		if len(line.format_tokens) <= 0 {
-			continue
-		}
-
-		for format_token, token_index in line.format_tokens {
-			#partial switch format_token.kind {
-			case .For, .If, .When, .Switch:
-				format_keyword_to_brace(p, line_index, token_index, format_token.kind)
-			case .Proc:
-				if format_token.type == .Proc_Lit {
-					format_keyword_to_brace(p, line_index, token_index, format_token.kind)
-				}
-			case:
-				if format_token.type == .Call {
-					format_call(p, line_index, token_index)
-				}
-			}
-		}
-
-		if .Switch_Stmt in line.types && p.config.align_switch {
-			align_switch_stmt(p, line_index)
-		}
-
-		if .Enum in line.types && p.config.align_enums {
-			align_enum(p, line_index)
-		}
-
-		if .Struct in line.types && p.config.align_structs && next_struct_line <= 0 {
-			next_struct_line = align_struct(p, line_index)
-		}
-
-		if .Value_Decl in line.types {
-			format_value_decl(p, line_index)
-		}
-
-		if .Assign in line.types {
-			format_assignment(p, line_index)
-		}
-
-		next_struct_line -= 1
-	}
-}
-
-align_var_decls :: proc(p: ^Printer) {
-
-	current_line:        int
-	current_typed:       bool
-	current_not_mutable: bool
-
-	largest_lhs := 0
-	largest_rhs := 0
-
-	TokenAndLength :: struct {
-		format_token: ^Format_Token,
-		length:       int,
-	}
-
-	colon_tokens := make([dynamic]TokenAndLength, 0, 10, context.temp_allocator)
-	type_tokens  := make([dynamic]TokenAndLength, 0, 10, context.temp_allocator)
-	equal_tokens := make([dynamic]TokenAndLength, 0, 10, context.temp_allocator)
-
-	for line, line_index in p.lines {
-
-		//It is only possible to align value decls that are on one line, otherwise just ignore them
-		if .Value_Decl not_in line.types {
-			continue
-		}
-
-		typed         := true
-		not_mutable   := false
-		continue_flag := false
-
-		for i := 0; i < len(line.format_tokens); i += 1 {
-			if line.format_tokens[i].kind == .Colon && line.format_tokens[min(i + 1, len(line.format_tokens) - 1)].kind == .Eq {
-				typed = false
-			}
-
-			if line.format_tokens[i].kind == .Colon && line.format_tokens[min(i + 1, len(line.format_tokens) - 1)].kind == .Colon {
-				not_mutable = true
-			}
-
-			if line.format_tokens[i].kind == .Union ||
-			   line.format_tokens[i].kind == .Enum ||
-			   line.format_tokens[i].kind == .Struct ||
-			   line.format_tokens[i].kind == .For ||
-			   line.format_tokens[i].kind == .If ||
-			   line.format_tokens[i].kind == .Comment {
-				continue_flag = true
-			}
-
-			//enforced undef is always on the last line, if it exists
-			if line.format_tokens[i].kind == .Proc && line.format_tokens[len(line.format_tokens)-1].kind != .Undef {
-				continue_flag = true
-			}
-
-		}
-
-		if continue_flag {
-			continue
-		}
-
-		if line_index != current_line + 1 || typed != current_typed || not_mutable != current_not_mutable {
-
-			if p.config.align_style == .Align_On_Colon_And_Equals || !current_typed || current_not_mutable {
-				for colon_token in colon_tokens {
-					colon_token.format_token.spaces_before = largest_lhs - colon_token.length + 1
-				}
-			} else if p.config.align_style == .Align_On_Type_And_Equals {
-				for type_token in type_tokens {
-					type_token.format_token.spaces_before = largest_lhs - type_token.length + 1
-				}
-			}
-
-			if current_typed {
-				for equal_token in equal_tokens {
-					equal_token.format_token.spaces_before = largest_rhs - equal_token.length + 1
-				}
-			} else {
-				for equal_token in equal_tokens {
-					equal_token.format_token.spaces_before = 0
-				}
-			}
-
-			clear(&colon_tokens)
-			clear(&type_tokens)
-			clear(&equal_tokens)
-
-			largest_rhs = 0
-			largest_lhs = 0
-			current_typed = typed
-			current_not_mutable = not_mutable
-		}
-
-		current_line = line_index
-
-		current_token_index := 0
-		lhs_length          := 0
-		rhs_length          := 0
-
-		//calculate the length of lhs of a value decl i.e. `a, b:`
-		for; current_token_index < len(line.format_tokens); current_token_index += 1 {
-
-			lhs_length += len(line.format_tokens[current_token_index].text) + line.format_tokens[current_token_index].spaces_before
-
-			if line.format_tokens[current_token_index].kind == .Colon {
-				append(&colon_tokens, TokenAndLength {format_token = &line.format_tokens[current_token_index], length = lhs_length})
-
-				if len(line.format_tokens) > current_token_index && line.format_tokens[current_token_index + 1].kind != .Eq {
-					append(&type_tokens, TokenAndLength {format_token = &line.format_tokens[current_token_index + 1], length = lhs_length})
-				}
-
-				current_token_index += 1
-				largest_lhs = max(largest_lhs, lhs_length)
-				break
-			}
-		}
-
-		//calculate the length of the rhs i.e. `[dynamic]int = 123123`
-		for; current_token_index < len(line.format_tokens); current_token_index += 1 {
-
-			rhs_length += len(line.format_tokens[current_token_index].text) + line.format_tokens[current_token_index].spaces_before
-
-			if line.format_tokens[current_token_index].kind == .Eq {
-				append(&equal_tokens, TokenAndLength {format_token = &line.format_tokens[current_token_index], length = rhs_length})
-				largest_rhs = max(largest_rhs, rhs_length)
-				break
-			}
-		}
-
-	}
-
-	//repeating myself, move to sub procedure
-	if p.config.align_style == .Align_On_Colon_And_Equals || !current_typed || current_not_mutable {
-		for colon_token in colon_tokens {
-			colon_token.format_token.spaces_before = largest_lhs - colon_token.length + 1
-		}
-	} else if p.config.align_style == .Align_On_Type_And_Equals {
-		for type_token in type_tokens {
-			type_token.format_token.spaces_before = largest_lhs - type_token.length + 1
-		}
-	}
-
-	if current_typed {
-		for equal_token in equal_tokens {
-			equal_token.format_token.spaces_before = largest_rhs - equal_token.length + 1
-		}
-	} else {
-		for equal_token in equal_tokens {
-			equal_token.format_token.spaces_before = 0
-		}
-	}
-}
-
-align_switch_stmt :: proc(p: ^Printer, index: int) {
-	switch_found := false
-	brace_token: Format_Token
-	brace_line:  int
-
-	found_switch_brace: for line, line_index in p.lines[index:] {
-		for format_token in line.format_tokens {
-			if format_token.kind == .Open_Brace && switch_found {
-				brace_token = format_token
-				brace_line = line_index + index
-				break found_switch_brace
-			} else if format_token.kind == .Open_Brace {
-				break
-			} else if format_token.kind == .Switch {
-				switch_found = true
-			}
-		}
-	}
-
-	if !switch_found {
-		return
-	}
-
-	largest    := 0
-	case_count := 0
-
-	TokenAndLength :: struct {
-		format_token: ^Format_Token,
-		length:       int,
-	}
-
-	format_tokens := make([dynamic]TokenAndLength, 0, brace_token.parameter_count, context.temp_allocator)
-
-	//find all the switch cases that are one lined
-	for line in p.lines[brace_line + 1:] {
-
-		case_found  := false
-		colon_found := false
-		length      := 0
-
-		for format_token, i in line.format_tokens {
-
-			if format_token.kind == .Comment {
-				break
-			}
-
-			//this will only happen if the case is one lined
-			if case_found && colon_found {
-				append(&format_tokens, TokenAndLength {format_token = &line.format_tokens[i], length = length})
-				largest = max(length, largest)
-				break
-			}
-
-			if format_token.kind == .Case {
-				case_found = true
-				case_count += 1
-			} else if format_token.kind == .Colon {
-				colon_found = true
-			}
-
-			length += len(format_token.text) + format_token.spaces_before
-		}
-
-		if case_count >= brace_token.parameter_count {
-			break
-		}
-	}
-
-	for token in format_tokens {
-		token.format_token.spaces_before = largest - token.length + 1
-	}
-
-}
-
-align_enum :: proc(p: ^Printer, index: int) {
-	enum_found := false
-	brace_token: Format_Token
-	brace_line:  int
-
-	found_enum_brace: for line, line_index in p.lines[index:] {
-		for format_token in line.format_tokens {
-			if format_token.kind == .Open_Brace && enum_found {
-				brace_token = format_token
-				brace_line = line_index + index
-				break found_enum_brace
-			} else if format_token.kind == .Open_Brace {
-				break
-			} else if format_token.kind == .Enum {
-				enum_found = true
-			}
-		}
-	}
-
-	if !enum_found {
-		return
-	}
-
-	largest     := 0
-	comma_count := 0
-
-	TokenAndLength :: struct {
-		format_token: ^Format_Token,
-		length:       int,
-	}
-
-	format_tokens := make([dynamic]TokenAndLength, 0, brace_token.parameter_count, context.temp_allocator)
-
-	for line in p.lines[brace_line + 1:] {
-		length := 0
-
-		for format_token, i in line.format_tokens {
-			if format_token.kind == .Comment {
-				break
-			}
-
-			if format_token.kind == .Eq {
-				append(&format_tokens, TokenAndLength {format_token = &line.format_tokens[i], length = length})
-				largest = max(length, largest)
-				break
-			} else if format_token.kind == .Comma {
-				comma_count += 1
-			}
-
-			length += len(format_token.text) + format_token.spaces_before
-		}
-
-		if comma_count >= brace_token.parameter_count {
-			break
-		}
-	}
-
-	for token in format_tokens {
-		token.format_token.spaces_before = largest - token.length + 1
-	}
-
-}
-
-align_struct :: proc(p: ^Printer, index: int) -> int {
-	struct_found := false
-	brace_token: Format_Token
-	brace_line:  int
-
-	found_struct_brace: for line, line_index in p.lines[index:] {
-		for format_token in line.format_tokens {
-			if format_token.kind == .Open_Brace && struct_found {
-				brace_token = format_token
-				brace_line = line_index + index
-				break found_struct_brace
-			} else if format_token.kind == .Open_Brace {
-				break
-			} else if format_token.kind == .Struct {
-				struct_found = true
-			}
-		}
-	}
-
-	if !struct_found {
-		return 0
-	}
-
-	largest     := 0
-	colon_count := 0
-	nested      := false
-	seen_brace  := false
-
-	TokenAndLength :: struct {
-		format_token: ^Format_Token,
-		length:       int,
-	}
-
-	format_tokens := make([]TokenAndLength, brace_token.parameter_count, context.temp_allocator)
-
-	if brace_token.parameter_count == 0 {
-		return 0
-	}
-
-	end_line_index := 0
-
-	for line, line_index in p.lines[brace_line + 1:] {
-		length := 0
-
-		for format_token, i in line.format_tokens {
-
-			//give up on nested structs
-			if format_token.kind == .Comment {
-				break
-			} else if format_token.kind == .Open_Paren {
-				break
-			} else if format_token.kind == .Open_Brace {
-				seen_brace = true
-			} else if format_token.kind == .Close_Brace {
-				seen_brace = false
-			} else if seen_brace {
-				continue
-			}
-
-			if format_token.kind == .Colon {
-				format_tokens[colon_count] = {format_token = &line.format_tokens[i + 1], length = length}
-
-				if format_tokens[colon_count].format_token.kind == .Struct {
-					nested = true
-				}
-
-				colon_count += 1
-				largest = max(length, largest)
-			}
-
-			length += len(format_token.text) + format_token.spaces_before
-		}
-
-		if nested {
-			end_line_index = line_index + brace_line + 1
-		}
-
-		if colon_count >= brace_token.parameter_count {
-			break
-		}
-	}
-
-	//give up aligning nested, it never looks good
-	if nested {
-		for line, line_index in p.lines[end_line_index:] {
-			for format_token in line.format_tokens {
-				if format_token.kind == .Close_Brace {
-					return end_line_index + line_index - index
-				}
-			}
-		}
-	}
-
-	for token in format_tokens {
-		token.format_token.spaces_before = largest - token.length + 1
-	}
-
-	return 0
-}
-
-align_comments :: proc(p: ^Printer) {
-
-	Comment_Align_Info :: struct {
-		length: int,
-		begin:  int,
-		end:    int,
-		depth:  int,
-	}
-
-	comment_infos := make([dynamic]Comment_Align_Info, 0, context.temp_allocator)
-
-	current_info: Comment_Align_Info
-
-	for line, line_index in p.lines {
-		if len(line.format_tokens) <= 0 {
-			continue
-		}
-
-		if .Line_Comment in line.types {
-			if current_info.end + 1 != line_index || current_info.depth != line.depth ||
-			   (current_info.begin == current_info.end && current_info.length == 0) {
-
-				if (current_info.begin != 0 && current_info.end != 0) || current_info.length > 0 {
-					append(&comment_infos, current_info)
-				}
-
-				current_info.begin = line_index
-				current_info.end = line_index
-				current_info.depth = line.depth
-				current_info.length = 0
-			}
-
-			length := 0
-
-			for format_token in line.format_tokens {
-				if format_token.kind == .Comment {
-					current_info.length = max(current_info.length, length)
-					current_info.end = line_index
-				}
-
-				length += format_token.spaces_before + len(format_token.text)
-			}
-		}
-	}
-
-	if (current_info.begin != 0 && current_info.end != 0) || current_info.length > 0 {
-		append(&comment_infos, current_info)
-	}
-
-	for info in comment_infos {
-
-		if info.begin == info.end || info.length == 0 {
-			continue
-		}
-
-		for i := info.begin; i <= info.end; i += 1 {
-			l := p.lines[i]
-
-			length := 0
-
-			for format_token in l.format_tokens {
-				if format_token.kind == .Comment {
-					if len(l.format_tokens) == 1 {
-						l.format_tokens[i].spaces_before = info.length + 1
-					} else {
-						l.format_tokens[i].spaces_before = info.length - length + 1
-					}
-				}
-
-				length += format_token.spaces_before + len(format_token.text)
-			}
-		}
-	}
-}

+ 0 - 1629
core/odin/printer/visit.odin

@@ -1,1629 +0,0 @@
-package odin_printer
-
-import "core:odin/ast"
-import "core:odin/tokenizer"
-import "core:strings"
-import "core:fmt"
-import "core:sort"
-
-//right now the attribute order is not linearly parsed (bug?)
-@(private)
-sort_attribute :: proc(s: ^[dynamic]^ast.Attribute) -> sort.Interface {
-	return sort.Interface {
-		collection = rawptr(s),
-		len = proc(it: sort.Interface) -> int {
-			s := (^[dynamic]^ast.Attribute)(it.collection)
-			return len(s^)
-		},
-		less = proc(it: sort.Interface, i, j: int) -> bool {
-			s := (^[dynamic]^ast.Attribute)(it.collection)
-			return s[i].pos.offset < s[j].pos.offset
-		},
-		swap = proc(it: sort.Interface, i, j: int) {
-			s := (^[dynamic]^ast.Attribute)(it.collection)
-			s[i], s[j] = s[j], s[i]
-		},
-	}
-}
-
-@(private)
-comment_before_position :: proc(p: ^Printer, pos: tokenizer.Pos) -> bool {
-	if len(p.comments) <= p.latest_comment_index {
-		return false
-	}
-
-	comment := p.comments[p.latest_comment_index]
-
-	return comment.pos.offset < pos.offset
-}
-
-@(private)
-next_comment_group :: proc(p: ^Printer) {
-	p.latest_comment_index += 1
-}
-
-@(private)
-push_comment :: proc(p: ^Printer, comment: tokenizer.Token) -> int {
-	if len(comment.text) == 0 {
-		return 0
-	}
-
-	if comment.text[:2] != "/*" {
-		format_token := Format_Token {
-			spaces_before = 1,
-			kind = .Comment,
-			text = comment.text,
-		}
-
-		if len(p.current_line.format_tokens) == 0 {
-			format_token.spaces_before = 0
-		}
-
-		if !p.current_line.used {
-			p.current_line.used = true
-			p.current_line.depth = p.depth
-		}
-
-		append(&p.current_line.format_tokens, format_token)
-		p.last_token = &p.current_line.format_tokens[len(p.current_line.format_tokens) - 1]
-
-		hint_current_line(p, {.Line_Comment})
-
-		return 0
-	} else {
-		builder := strings.builder_make(context.temp_allocator)
-
-		c_len      := len(comment.text)
-		trim_space := true
-
-		multilines: [dynamic]string
-
-		for i := 0; i < len(comment.text); i += 1 {
-			c := comment.text[i]
-
-			if c != ' ' && c != '\t' {
-				trim_space = false
-			}
-
-			switch {
-			case (c == ' ' || c == '\t' || c == '\n') && trim_space:
-				continue
-			case c == '\r' && comment.text[min(c_len - 1, i + 1)] == '\n':
-				append(&multilines, strings.to_string(builder))
-				builder = strings.builder_make(context.temp_allocator)
-				trim_space = true
-				i += 1
-			case c == '\n':
-				append(&multilines, strings.to_string(builder))
-				builder = strings.builder_make(context.temp_allocator)
-				trim_space = true
-			case c == '/' && comment.text[min(c_len - 1, i + 1)] == '*':
-				strings.write_string(&builder, "/*")
-				trim_space = true
-				i += 1
-			case c == '*' && comment.text[min(c_len - 1, i + 1)] == '/':
-				trim_space = true
-				strings.write_string(&builder, "*/")
-				i += 1
-			case:
-				strings.write_byte(&builder, c)
-			}
-		}
-
-		if strings.builder_len(builder) > 0 {
-			append(&multilines, strings.to_string(builder))
-		}
-
-		for line in multilines {
-			format_token := Format_Token {
-				spaces_before = 1,
-				kind = .Comment,
-				text = line,
-			}
-
-			if len(p.current_line.format_tokens) == 0 {
-				format_token.spaces_before = 0
-			}
-
-			if strings.contains(line, "*/") {
-				unindent(p)
-			}
-
-			if !p.current_line.used {
-				p.current_line.used = true
-				p.current_line.depth = p.depth
-			}
-
-			append(&p.current_line.format_tokens, format_token)
-			p.last_token = &p.current_line.format_tokens[len(p.current_line.format_tokens) - 1]
-
-			if strings.contains(line, "/*") {
-				indent(p)
-			}
-
-			newline_position(p, 1)
-		}
-
-		return len(multilines)
-	}
-}
-
-@(private)
-push_comments :: proc(p: ^Printer, pos: tokenizer.Pos) {
-	prev_comment:       ^tokenizer.Token
-	prev_comment_lines: int
-
-	for comment_before_position(p, pos) {
-		comment_group := p.comments[p.latest_comment_index]
-
-		if prev_comment == nil {
-			lines := comment_group.pos.line - p.last_source_position.line
-			set_line(p, p.last_line_index + min(p.config.newline_limit+1, lines))
-		}
-
-		for comment, i in comment_group.list {
-			if prev_comment != nil && p.last_source_position.line != comment.pos.line {
-				newline_position(p, min(p.config.newline_limit+1, comment.pos.line - prev_comment.pos.line - prev_comment_lines))
-			}
-
-			prev_comment_lines = push_comment(p, comment)
-			prev_comment = &comment_group.list[i]
-		}
-
-		next_comment_group(p)
-	}
-
-	if prev_comment != nil {
-		newline_position(p, min(p.config.newline_limit+1, p.source_position.line - prev_comment.pos.line - prev_comment_lines))
-	}
-}
-
-@(private)
-append_format_token :: proc(p: ^Printer, format_token: Format_Token) -> ^Format_Token {
-	format_token := format_token
-
-	if p.last_token != nil && (
-           p.last_token.kind == .Ellipsis ||
-           p.last_token.kind == .Range_Half || p.last_token.kind == .Range_Full ||
-	   p.last_token.kind == .Open_Paren || p.last_token.kind == .Period ||
-	   p.last_token.kind == .Open_Brace || p.last_token.kind == .Open_Bracket) {
-		format_token.spaces_before = 0
-	} else if p.merge_next_token {
-		format_token.spaces_before = 0
-		p.merge_next_token = false
-	} else if p.space_next_token {
-		format_token.spaces_before = 1
-		p.space_next_token = false
-	}
-
-	push_comments(p, p.source_position)
-
-	unwrapped_line := p.current_line
-
-	if !unwrapped_line.used {
-		unwrapped_line.used = true
-		unwrapped_line.depth = p.depth
-	}
-
-	if len(unwrapped_line.format_tokens) == 0 && format_token.spaces_before == 1 {
-		format_token.spaces_before = 0
-	}
-
-	p.last_source_position = p.source_position
-	p.last_line_index = p.current_line_index
-
-	append(&unwrapped_line.format_tokens, format_token)
-	return &unwrapped_line.format_tokens[len(unwrapped_line.format_tokens) - 1]
-}
-
-@(private)
-push_format_token :: proc(p: ^Printer, format_token: Format_Token) {
-	p.last_token = append_format_token(p, format_token)
-}
-
-@(private)
-push_generic_token :: proc(p: ^Printer, kind: tokenizer.Token_Kind, spaces_before: int, value := "") {
-	format_token := Format_Token {
-		spaces_before = spaces_before,
-		kind = kind,
-		text = tokenizer.tokens[kind],
-	}
-
-	if value != "" {
-		format_token.text = value
-	}
-
-	p.last_token = append_format_token(p, format_token)
-}
-
-@(private)
-push_string_token :: proc(p: ^Printer, text: string, spaces_before: int) {
-	format_token := Format_Token {
-		spaces_before = spaces_before,
-		kind = .String,
-		text = text,
-	}
-
-	p.last_token = append_format_token(p, format_token)
-}
-
-@(private)
-push_ident_token :: proc(p: ^Printer, text: string, spaces_before: int) {
-	format_token := Format_Token {
-		spaces_before = spaces_before,
-		kind = .Ident,
-		text = text,
-	}
-
-	p.last_token = append_format_token(p, format_token)
-}
-
-@(private)
-set_source_position :: proc(p: ^Printer, pos: tokenizer.Pos) {
-	p.source_position = pos
-}
-
-@(private)
-move_line :: proc(p: ^Printer, pos: tokenizer.Pos) {
-	move_line_limit(p, pos, p.config.newline_limit+1)
-}
-
-@(private)
-move_line_limit :: proc(p: ^Printer, pos: tokenizer.Pos, limit: int) -> bool {
-	lines := min(pos.line - p.source_position.line, limit)
-
-	if lines < 0 {
-		return false
-	}
-
-	p.source_position = pos
-	p.current_line_index += lines
-	set_line(p, p.current_line_index)
-	return lines > 0
-}
-
-@(private)
-set_line :: proc(p: ^Printer, line: int) -> ^Line {
-	unwrapped_line: ^Line
-
-	if line >= len(p.lines) {
-		for i := len(p.lines); i <= line; i += 1 {
-			new_line: Line
-			new_line.format_tokens = make([dynamic]Format_Token, 0, 50, p.allocator)
-			append(&p.lines, new_line)
-		}
-		unwrapped_line = &p.lines[line]
-	} else {
-		unwrapped_line = &p.lines[line]
-	}
-
-	p.current_line = unwrapped_line
-	p.current_line_index = line
-
-	return unwrapped_line
-}
-
-@(private)
-newline_position :: proc(p: ^Printer, count: int) {
-	p.current_line_index += count
-	set_line(p, p.current_line_index)
-}
-
-@(private)
-indent :: proc(p: ^Printer) {
-	p.depth += 1
-}
-
-@(private)
-unindent :: proc(p: ^Printer) {
-	p.depth -= 1
-}
-
-@(private)
-merge_next_token :: proc(p: ^Printer) {
-	p.merge_next_token = true
-}
-
-@(private)
-space_next_token :: proc(p: ^Printer) {
-	p.space_next_token = true
-}
-
-@(private)
-hint_current_line :: proc(p: ^Printer, hint: Line_Type) {
-	p.current_line.types |= hint
-}
-
-@(private)
-visit_decl :: proc(p: ^Printer, decl: ^ast.Decl, called_in_stmt := false) {
-	if decl == nil {
-		return
-	}
-
-	#partial switch v in decl.derived_stmt {
-	case ^ast.Expr_Stmt:
-		move_line(p, decl.pos)
-		visit_expr(p, v.expr)
-		if p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case ^ast.When_Stmt:
-		visit_stmt(p, cast(^ast.Stmt)decl)
-	case ^ast.Foreign_Import_Decl:
-		if len(v.attributes) > 0 {
-			sort.sort(sort_attribute(&v.attributes))
-			move_line(p, v.attributes[0].pos)
-			visit_attributes(p, v.attributes)
-		}
-
-		move_line(p, decl.pos)
-
-		push_generic_token(p, v.foreign_tok.kind, 0)
-		push_generic_token(p, v.import_tok.kind, 1)
-
-		if v.name != nil {
-			push_ident_token(p, v.name.name, 1)
-		}
-
-		for path in v.fullpaths {
-			push_ident_token(p, path, 0)
-		}
-	case ^ast.Foreign_Block_Decl:
-		if len(v.attributes) > 0 {
-			sort.sort(sort_attribute(&v.attributes))
-			move_line(p, v.attributes[0].pos)
-			visit_attributes(p, v.attributes)
-		}
-
-		move_line(p, decl.pos)
-
-		push_generic_token(p, .Foreign, 0)
-
-		visit_expr(p, v.foreign_library)
-		visit_stmt(p, v.body)
-	case ^ast.Import_Decl:
-		move_line(p, decl.pos)
-
-		if v.name.text != "" {
-			push_generic_token(p, v.import_tok.kind, 1)
-			push_generic_token(p, v.name.kind, 1, v.name.text)
-			push_ident_token(p, v.fullpath, 1)
-		} else {
-			push_generic_token(p, v.import_tok.kind, 1)
-			push_ident_token(p, v.fullpath, 1)
-		}
-
-	case ^ast.Value_Decl:
-		if len(v.attributes) > 0 {
-			sort.sort(sort_attribute(&v.attributes))
-			move_line(p, v.attributes[0].pos)
-			visit_attributes(p, v.attributes)
-		}
-
-		move_line(p, decl.pos)
-
-		if v.is_using {
-			push_generic_token(p, .Using, 0)
-		}
-
-		visit_exprs(p, v.names, {.Add_Comma})
-
-		hint_current_line(p, {.Value_Decl})
-
-		if v.type != nil {
-			if !v.is_mutable {
-				push_generic_token(p, .Colon, 0)
-			} else {
-				push_generic_token(p, .Colon, 0)
-			}
-
-			visit_expr(p, v.type)
-		} else {
-			if !v.is_mutable {
-				push_generic_token(p, .Colon, 1)
-				push_generic_token(p, .Colon, 0)
-			} else {
-				push_generic_token(p, .Colon, 1)
-			}
-		}
-
-		if v.is_mutable && v.type != nil && len(v.values) != 0 {
-			push_generic_token(p, .Eq, 1)
-		} else if v.is_mutable && v.type == nil && len(v.values) != 0 {
-			push_generic_token(p, .Eq, 0)
-		} else if !v.is_mutable && v.type != nil {
-			push_generic_token(p, .Colon, 0)
-		}
-
-		if len(v.values) == 1 {
-			visit_expr(p, v.values[0]) //this is too ensure that one value are never newlined(procs, structs, etc.)
-		} else {
-			visit_exprs(p, v.values, {.Add_Comma})
-		}
-
-		add_semicolon := true
-
-		for value in v.values {
-			#partial switch a in value.derived {
-			case ^ast.Union_Type, ^ast.Enum_Type, ^ast.Struct_Type, ^ast.Bit_Field_Type:
-				add_semicolon = false || called_in_stmt
-			case ^ast.Proc_Lit:
-				add_semicolon = false
-			}
-		}
-
-		if add_semicolon && p.config.semicolons && !p.skip_semicolon {
-			push_generic_token(p, .Semicolon, 0)
-		}
-
-	case:
-		panic(fmt.aprint(decl.derived))
-	}
-}
-
-@(private)
-visit_exprs :: proc(p: ^Printer, list: []^ast.Expr, options := List_Options{}) {
-	if len(list) == 0 {
-		return
-	}
-
-	// we have to newline the expressions to respect the source
-	for expr, i in list {
-		// Don't move the first expression, it looks bad
-		if i != 0 && .Enforce_Newline in options {
-			newline_position(p, 1)
-		} else if i != 0 {
-			move_line_limit(p, expr.pos, 1)
-		}
-
-		visit_expr(p, expr, options)
-
-		if (i != len(list) - 1 || .Trailing in options) && .Add_Comma in options {
-			push_generic_token(p, .Comma, 0)
-		}
-	}
-
-	if len(list) > 1 && .Enforce_Newline in options {
-		newline_position(p, 1)
-	}
-}
-
-@(private)
-visit_bit_field_fields :: proc(p: ^Printer, list: []^ast.Bit_Field_Field, options := List_Options{}) {
-	if len(list) == 0 {
-		return
-	}
-
-	// we have to newline the expressions to respect the source
-	for v, i in list {
-		// Don't move the first expression, it looks bad
-		if i != 0 && .Enforce_Newline in options {
-			newline_position(p, 1)
-		} else if i != 0 {
-			move_line_limit(p, v.pos, 1)
-		}
-
-		visit_expr(p, v.name, options)
-		push_generic_token(p, .Colon, 0)
-		visit_expr(p, v.type, options)
-		push_generic_token(p, .Or, 1)
-		visit_expr(p, v.bit_size, options)
-
-		if (i != len(list) - 1 || .Trailing in options) && .Add_Comma in options {
-			push_generic_token(p, .Comma, 0)
-		}
-	}
-
-	if len(list) > 1 && .Enforce_Newline in options {
-		newline_position(p, 1)
-	}
-}
-
-@(private)
-visit_attributes :: proc(p: ^Printer, attributes: [dynamic]^ast.Attribute) {
-	if len(attributes) == 0 {
-		return
-	}
-
-	for attribute in attributes {
-		move_line_limit(p, attribute.pos, 1)
-
-		push_generic_token(p, .At, 0)
-		push_generic_token(p, .Open_Paren, 0)
-
-		visit_exprs(p, attribute.elems, {.Add_Comma})
-
-		push_generic_token(p, .Close_Paren, 0)
-	}
-}
-
-@(private)
-visit_stmt :: proc(p: ^Printer, stmt: ^ast.Stmt, block_type: Block_Type = .Generic, empty_block := false, block_stmt := false) {
-	if stmt == nil {
-		return
-	}
-
-
-	switch v in stmt.derived_stmt {
-	case ^ast.Bad_Stmt:
-	case ^ast.Bad_Decl:
-	case ^ast.Package_Decl:
-
-	case ^ast.Empty_Stmt:
-		push_generic_token(p, .Semicolon, 0)
-	case ^ast.Tag_Stmt:
-		push_generic_token(p, .Hash, 1)
-		push_generic_token(p, v.op.kind, 1, v.op.text)
-		visit_stmt(p, v.stmt)
-
-
-	case ^ast.Import_Decl:
-		visit_decl(p, cast(^ast.Decl)stmt, true)
-		return
-	case ^ast.Value_Decl:
-		visit_decl(p, cast(^ast.Decl)stmt, true)
-		return
-	case ^ast.Foreign_Import_Decl:
-		visit_decl(p, cast(^ast.Decl)stmt, true)
-		return
-	case ^ast.Foreign_Block_Decl:
-		visit_decl(p, cast(^ast.Decl)stmt, true)
-		return
-
-	case ^ast.Using_Stmt:
-		move_line(p, v.pos)
-
-		push_generic_token(p, .Using, 1)
-
-		visit_exprs(p, v.list, {.Add_Comma})
-
-		if p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case ^ast.Block_Stmt:
-		move_line(p, v.pos)
-
-		if v.pos.line == v.end.line {
-			if !empty_block {
-				push_generic_token(p, .Open_Brace, 1)
-			}
-
-			set_source_position(p, v.pos)
-
-			visit_block_stmts(p, v.stmts, len(v.stmts) > 1 && p.config.split_multiple_stmts)
-
-			set_source_position(p, v.end)
-
-			if !empty_block {
-				push_generic_token(p, .Close_Brace, 0)
-			}
-		} else {
-			if !empty_block {
-				visit_begin_brace(p, v.pos, block_type, len(v.stmts))
-			}
-
-			set_source_position(p, v.pos)
-
-			visit_block_stmts(p, v.stmts, len(v.stmts) > 1 && p.config.split_multiple_stmts)
-
-			if !empty_block {
-				visit_end_brace(p, v.end)
-			}
-		}
-	case ^ast.If_Stmt:
-		move_line(p, v.pos)
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-			push_generic_token(p, .Colon, 0)
-		}
-
-		push_generic_token(p, .If, 1)
-
-		hint_current_line(p, {.If})
-
-		if v.init != nil {
-			p.skip_semicolon = true
-			visit_stmt(p, v.init)
-			p.skip_semicolon = false
-			push_generic_token(p, .Semicolon, 0)
-		}
-
-		visit_expr(p, v.cond)
-
-		uses_do := false
-
-		if check_stmt, ok := v.body.derived.(^ast.Block_Stmt); ok && check_stmt.uses_do {
-			uses_do = true
-		}
-
-		if uses_do && !p.config.convert_do {
-			push_generic_token(p, .Do, 1)
-			visit_stmt(p, v.body, .If_Stmt, true)
-		} else {
-			if uses_do {
-				newline_position(p, 1)
-			}
-
-			set_source_position(p, v.body.pos)
-
-			visit_stmt(p, v.body, .If_Stmt)
-
-			set_source_position(p, v.body.end)
-		}
-
-		if v.else_stmt != nil {
-
-			if p.config.brace_style == .Allman || p.config.brace_style == .Stroustrup {
-				newline_position(p, 1)
-			}
-
-			push_generic_token(p, .Else, 1)
-
-			set_source_position(p, v.else_stmt.pos)
-
-			visit_stmt(p, v.else_stmt)
-		}
-	case ^ast.Switch_Stmt:
-		move_line(p, v.pos)
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-			push_generic_token(p, .Colon, 0)
-		}
-
-		if v.partial {
-			push_ident_token(p, "#partial", 1)
-		}
-
-		push_generic_token(p, .Switch, 1)
-
-		hint_current_line(p, {.Switch_Stmt})
-
-		if v.init != nil {
-			p.skip_semicolon = true
-			visit_stmt(p, v.init)
-			p.skip_semicolon = false
-		}
-
-		if v.init != nil && v.cond != nil {
-			push_generic_token(p, .Semicolon, 0)
-		}
-
-		visit_expr(p, v.cond)
-		visit_stmt(p, v.body)
-	case ^ast.Case_Clause:
-		move_line(p, v.pos)
-
-		if !p.config.indent_cases {
-			unindent(p)
-		}
-
-		push_generic_token(p, .Case, 0)
-
-		if v.list != nil {
-			visit_exprs(p, v.list, {.Add_Comma})
-		}
-
-		push_generic_token(p, v.terminator.kind, 0)
-
-		indent(p)
-
-		visit_block_stmts(p, v.body)
-
-		unindent(p)
-
-		if !p.config.indent_cases {
-			indent(p)
-		}
-	case ^ast.Type_Switch_Stmt:
-		move_line(p, v.pos)
-
-		hint_current_line(p, {.Switch_Stmt})
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-			push_generic_token(p, .Colon, 0)
-		}
-
-		if v.partial {
-			push_ident_token(p, "#partial", 1)
-		}
-
-		push_generic_token(p, .Switch, 1)
-
-		visit_stmt(p, v.tag)
-		visit_stmt(p, v.body)
-	case ^ast.Assign_Stmt:
-		move_line(p, v.pos)
-
-		hint_current_line(p, {.Assign})
-
-		visit_exprs(p, v.lhs, {.Add_Comma})
-
-		push_generic_token(p, v.op.kind, 1)
-
-		visit_exprs(p, v.rhs, {.Add_Comma})
-
-		if block_stmt && p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case ^ast.Expr_Stmt:
-		move_line(p, v.pos)
-		visit_expr(p, v.expr)
-		if block_stmt && p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case ^ast.For_Stmt:
-		// this should be simplified
-		move_line(p, v.pos)
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-			push_generic_token(p, .Colon, 0)
-		}
-
-		push_generic_token(p, .For, 1)
-
-		hint_current_line(p, {.For})
-
-		if v.init != nil {
-			p.skip_semicolon = true
-			visit_stmt(p, v.init)
-			p.skip_semicolon = false
-			push_generic_token(p, .Semicolon, 0)
-		} else if v.post != nil {
-			push_generic_token(p, .Semicolon, 0)
-		}
-
-		if v.cond != nil {
-			move_line(p, v.cond.pos)
-			visit_expr(p, v.cond)
-		}
-
-		if v.post != nil {
-			push_generic_token(p, .Semicolon, 0)
-			move_line(p, v.post.pos)
-			visit_stmt(p, v.post)
-		} else if v.post == nil && v.cond != nil && v.init != nil {
-			push_generic_token(p, .Semicolon, 0)
-		}
-
-		visit_stmt(p, v.body)
-
-	case ^ast.Inline_Range_Stmt:
-		move_line(p, v.pos)
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-			push_generic_token(p, .Colon, 0)
-		}
-
-		push_ident_token(p, "#unroll", 0)
-
-		push_generic_token(p, .For, 1)
-
-		hint_current_line(p, {.For})
-
-		visit_expr(p, v.val0)
-
-		if v.val1 != nil {
-			push_generic_token(p, .Comma, 0)
-			visit_expr(p, v.val1)
-		}
-
-		push_generic_token(p, .In, 1)
-
-		visit_expr(p, v.expr)
-		visit_stmt(p, v.body)
-
-	case ^ast.Range_Stmt:
-		move_line(p, v.pos)
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-			push_generic_token(p, .Colon, 0)
-		}
-
-		push_generic_token(p, .For, 1)
-
-		hint_current_line(p, {.For})
-
-		if len(v.vals) >= 1 {
-			visit_expr(p, v.vals[0])
-		}
-
-		if len(v.vals) >= 2 {
-			push_generic_token(p, .Comma, 0)
-			visit_expr(p, v.vals[1])
-		}
-
-		push_generic_token(p, .In, 1)
-
-		visit_expr(p, v.expr)
-
-		visit_stmt(p, v.body)
-	case ^ast.Return_Stmt:
-		move_line(p, v.pos)
-
-		push_generic_token(p, .Return, 1)
-
-		if v.results != nil {
-			visit_exprs(p, v.results, {.Add_Comma})
-		}
-
-		if block_stmt && p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case ^ast.Defer_Stmt:
-		move_line(p, v.pos)
-		push_generic_token(p, .Defer, 0)
-
-		visit_stmt(p, v.stmt)
-
-		if p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case ^ast.When_Stmt:
-		move_line(p, v.pos)
-		push_generic_token(p, .When, 1)
-		visit_expr(p, v.cond)
-
-		visit_stmt(p, v.body)
-
-		if v.else_stmt != nil {
-
-			if p.config.brace_style == .Allman {
-				newline_position(p, 1)
-			}
-
-			push_generic_token(p, .Else, 1)
-
-			set_source_position(p, v.else_stmt.pos)
-
-			visit_stmt(p, v.else_stmt)
-		}
-
-	case ^ast.Branch_Stmt:
-		move_line(p, v.pos)
-
-		push_generic_token(p, v.tok.kind, 0)
-
-		if v.label != nil {
-			visit_expr(p, v.label)
-		}
-
-		if p.config.semicolons {
-			push_generic_token(p, .Semicolon, 0)
-		}
-	case:
-		panic(fmt.aprint(stmt.derived))
-	}
-
-	set_source_position(p, stmt.end)
-}
-
-@(private)
-push_where_clauses :: proc(p: ^Printer, clauses: []^ast.Expr) {
-	if len(clauses) == 0 {
-		return
-	}
-
-	// TODO(bill): This is not outputting correctly at all
-
-	move_line(p, clauses[0].pos)
-	push_generic_token(p, .Where, 1)
-
-	force_newline := false
-
-	for expr, i in clauses {
-		// Don't move the first expression, it looks bad
-		if i != 0 && i != len(clauses)-1 && force_newline {
-			newline_position(p, 1)
-		} else if i != 0 {
-			move_line_limit(p, expr.pos, 1)
-		}
-
-		visit_expr(p, expr)
-
-		if i != len(clauses) - 1 {
-			push_generic_token(p, .Comma, 0)
-		}
-	}
-
-	if len(clauses) > 1 && force_newline {
-		newline_position(p, 1)
-	}
-}
-
-@(private)
-push_poly_params :: proc(p: ^Printer, poly_params: ^ast.Field_List) {
-	if poly_params != nil {
-		push_generic_token(p, .Open_Paren, 0)
-		visit_field_list(p, poly_params, {.Add_Comma, .Enforce_Poly_Names})
-		push_generic_token(p, .Close_Paren, 0)
-	}
-}
-
-
-@(private)
-visit_expr :: proc(p: ^Printer, expr: ^ast.Expr, options := List_Options{}) {
-	if expr == nil {
-		return
-	}
-
-	set_source_position(p, expr.pos)
-
-	switch v in expr.derived_expr {
-	case ^ast.Bad_Expr:
-
-	case ^ast.Tag_Expr:
-		push_generic_token(p, .Hash, 1)
-		push_generic_token(p, v.op.kind, 1, v.op.text)
-		visit_expr(p, v.expr)
-
-	case ^ast.Inline_Asm_Expr:
-		push_generic_token(p, v.tok.kind, 1, v.tok.text)
-
-		push_generic_token(p, .Open_Paren, 1)
-		visit_exprs(p, v.param_types, {.Add_Comma})
-		push_generic_token(p, .Close_Paren, 0)
-
-		push_generic_token(p, .Sub, 1)
-		push_generic_token(p, .Gt, 0)
-
-		visit_expr(p, v.return_type)
-
-		push_generic_token(p, .Open_Brace, 1)
-		visit_expr(p, v.asm_string)
-		push_generic_token(p, .Comma, 0)
-		visit_expr(p, v.constraints_string)
-		push_generic_token(p, .Close_Brace, 0)
-	case ^ast.Undef:
-		push_generic_token(p, .Undef, 1)
-	case ^ast.Auto_Cast:
-		push_generic_token(p, v.op.kind, 1)
-		visit_expr(p, v.expr)
-	case ^ast.Ternary_If_Expr:
-		visit_expr(p, v.x)
-		push_generic_token(p, v.op1.kind, 1)
-		visit_expr(p, v.cond)
-		push_generic_token(p, v.op2.kind, 1)
-		visit_expr(p, v.y)
-	case ^ast.Ternary_When_Expr:
-		visit_expr(p, v.x)
-		push_generic_token(p, v.op1.kind, 1)
-		visit_expr(p, v.cond)
-		push_generic_token(p, v.op2.kind, 1)
-		visit_expr(p, v.y)
-	case ^ast.Or_Else_Expr:
-		visit_expr(p, v.x)
-		push_generic_token(p, v.token.kind, 1)
-		visit_expr(p, v.y)
-	case ^ast.Or_Return_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, v.token.kind, 1)
-	case ^ast.Or_Branch_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, v.token.kind, 1)
-		if v.label != nil {
-			visit_expr(p, v.label)
-		}
-
-	case ^ast.Selector_Call_Expr:
-		visit_expr(p, v.call.expr)
-		push_generic_token(p, .Open_Paren, 1)
-		visit_exprs(p, v.call.args, {.Add_Comma})
-		push_generic_token(p, .Close_Paren, 0)
-	case ^ast.Ellipsis:
-		push_generic_token(p, .Ellipsis, 1)
-		visit_expr(p, v.expr)
-	case ^ast.Relative_Type:
-		visit_expr(p, v.tag)
-		visit_expr(p, v.type)
-	case ^ast.Slice_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, .Open_Bracket, 0)
-		visit_expr(p, v.low)
-		push_generic_token(p, v.interval.kind, 0)
-		if v.high != nil {
-			merge_next_token(p)
-			visit_expr(p, v.high)
-		}
-		push_generic_token(p, .Close_Bracket, 0)
-	case ^ast.Ident:
-		if .Enforce_Poly_Names in options {
-			push_generic_token(p, .Dollar, 1)
-			push_ident_token(p, v.name, 0)
-		} else {
-			push_ident_token(p, v.name, 1)
-		}
-	case ^ast.Deref_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, v.op.kind, 0)
-	case ^ast.Type_Cast:
-		push_generic_token(p, v.tok.kind, 1)
-		push_generic_token(p, .Open_Paren, 0)
-		visit_expr(p, v.type)
-		push_generic_token(p, .Close_Paren, 0)
-		merge_next_token(p)
-		visit_expr(p, v.expr)
-	case ^ast.Basic_Directive:
-		push_generic_token(p, v.tok.kind, 1)
-		push_ident_token(p, v.name, 0)
-	case ^ast.Distinct_Type:
-		push_generic_token(p, .Distinct, 1)
-		visit_expr(p, v.type)
-	case ^ast.Dynamic_Array_Type:
-		visit_expr(p, v.tag)
-		push_generic_token(p, .Open_Bracket, 1)
-		push_generic_token(p, .Dynamic, 0)
-		push_generic_token(p, .Close_Bracket, 0)
-		merge_next_token(p)
-		visit_expr(p, v.elem)
-	case ^ast.Bit_Set_Type:
-		push_generic_token(p, .Bit_Set, 1)
-		push_generic_token(p, .Open_Bracket, 0)
-
-		visit_expr(p, v.elem)
-
-		if v.underlying != nil {
-			push_generic_token(p, .Semicolon, 0)
-			visit_expr(p, v.underlying)
-		}
-
-		push_generic_token(p, .Close_Bracket, 0)
-	case ^ast.Union_Type:
-		push_generic_token(p, .Union, 1)
-
-		push_poly_params(p, v.poly_params)
-
-		switch v.kind {
-		case .Normal:
-		case .maybe:      push_ident_token(p, "#maybe", 1)
-		case .no_nil:     push_ident_token(p, "#no_nil", 1)
-		case .shared_nil: push_ident_token(p, "#shared_nil", 1)
-		}
-
-		push_where_clauses(p, v.where_clauses)
-
-		if v.variants != nil && (len(v.variants) == 0 || v.pos.line == v.end.line) {
-			push_generic_token(p, .Open_Brace, 1)
-			visit_exprs(p, v.variants, {.Add_Comma})
-			push_generic_token(p, .Close_Brace, 0)
-		} else {
-			visit_begin_brace(p, v.pos, .Generic)
-			newline_position(p, 1)
-			set_source_position(p, v.variants[0].pos)
-			visit_exprs(p, v.variants, {.Add_Comma, .Trailing})
-			visit_end_brace(p, v.end)
-		}
-	case ^ast.Enum_Type:
-		push_generic_token(p, .Enum, 1)
-
-		hint_current_line(p, {.Enum})
-
-		if v.base_type != nil {
-			visit_expr(p, v.base_type)
-		}
-
-		if v.fields != nil && (len(v.fields) == 0 || v.pos.line == v.end.line) {
-			push_generic_token(p, .Open_Brace, 1)
-			visit_exprs(p, v.fields, {.Add_Comma})
-			push_generic_token(p, .Close_Brace, 0)
-		} else {
-			visit_begin_brace(p, v.pos, .Generic, len(v.fields))
-			newline_position(p, 1)
-			set_source_position(p, v.fields[0].pos)
-			visit_exprs(p, v.fields, {.Add_Comma, .Trailing, .Enforce_Newline})
-			set_source_position(p, v.end)
-			visit_end_brace(p, v.end)
-		}
-
-		set_source_position(p, v.end)
-	case ^ast.Struct_Type:
-		push_generic_token(p, .Struct, 1)
-
-		hint_current_line(p, {.Struct})
-
-		push_poly_params(p, v.poly_params)
-
-		if v.is_packed {
-			push_ident_token(p, "#packed", 1)
-		}
-
-		if v.is_raw_union {
-			push_ident_token(p, "#raw_union", 1)
-		}
-
-		if v.align != nil {
-			push_ident_token(p, "#align", 1)
-			visit_expr(p, v.align)
-		}
-
-		push_where_clauses(p, v.where_clauses)
-
-		if v.fields != nil && (len(v.fields.list) == 0 || v.pos.line == v.end.line) {
-			push_generic_token(p, .Open_Brace, 1)
-			set_source_position(p, v.fields.pos)
-			visit_field_list(p, v.fields, {.Add_Comma})
-			push_generic_token(p, .Close_Brace, 0)
-		} else if v.fields != nil {
-			visit_begin_brace(p, v.pos, .Generic, len(v.fields.list))
-			set_source_position(p, v.fields.pos)
-			visit_field_list(p, v.fields, {.Add_Comma, .Trailing, .Enforce_Newline})
-			visit_end_brace(p, v.end)
-		}
-
-		set_source_position(p, v.end)
-	case ^ast.Proc_Lit:
-		switch v.inlining {
-		case .None:
-		case .Inline:
-			push_ident_token(p, "#force_inline", 0)
-		case .No_Inline:
-			push_ident_token(p, "#force_no_inline", 0)
-		}
-
-		visit_proc_type(p, v.type, true)
-
-		push_where_clauses(p, v.where_clauses)
-
-		if v.body != nil {
-			set_source_position(p, v.body.pos)
-			visit_stmt(p, v.body, .Proc)
-		} else {
-			push_generic_token(p, .Undef, 1)
-		}
-	case ^ast.Proc_Type:
-		visit_proc_type(p, v)
-	case ^ast.Basic_Lit:
-		push_generic_token(p, v.tok.kind, 1, v.tok.text)
-	case ^ast.Binary_Expr:
-		visit_binary_expr(p, v)
-	case ^ast.Implicit_Selector_Expr:
-		push_generic_token(p, .Period, 1)
-		push_ident_token(p, v.field.name, 0)
-	case ^ast.Call_Expr:
-		visit_expr(p, v.expr)
-
-		push_format_token(p,
-			Format_Token {
-				kind = .Open_Paren,
-				type = .Call,
-				text = "(",
-			},
-		)
-
-		hint_current_line(p, {.Call})
-
-		visit_call_exprs(p, v.args, v.ellipsis.kind == .Ellipsis)
-		push_generic_token(p, .Close_Paren, 0)
-	case ^ast.Typeid_Type:
-		push_generic_token(p, .Typeid, 1)
-
-		if v.specialization != nil {
-			push_generic_token(p, .Quo, 0)
-			visit_expr(p, v.specialization)
-		}
-	case ^ast.Selector_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, v.op.kind, 0)
-		visit_expr(p, v.field)
-	case ^ast.Paren_Expr:
-		push_generic_token(p, .Open_Paren, 1)
-		visit_expr(p, v.expr)
-		push_generic_token(p, .Close_Paren, 0)
-	case ^ast.Index_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, .Open_Bracket, 0)
-		visit_expr(p, v.index)
-		push_generic_token(p, .Close_Bracket, 0)
-	case ^ast.Matrix_Index_Expr:
-		visit_expr(p, v.expr)
-		push_generic_token(p, .Open_Bracket, 0)
-		visit_expr(p, v.row_index)
-		push_generic_token(p, .Comma, 0)
-		visit_expr(p, v.column_index)
-		push_generic_token(p, .Close_Bracket, 0)
-	case ^ast.Proc_Group:
-		push_generic_token(p, v.tok.kind, 1)
-
-		if len(v.args) != 0 && v.pos.line != v.args[len(v.args) - 1].pos.line {
-			visit_begin_brace(p, v.pos, .Generic)
-			newline_position(p, 1)
-			set_source_position(p, v.args[0].pos)
-			visit_exprs(p, v.args, {.Add_Comma, .Trailing})
-			visit_end_brace(p, v.end)
-		} else {
-			push_generic_token(p, .Open_Brace, 0)
-			visit_exprs(p, v.args, {.Add_Comma})
-			push_generic_token(p, .Close_Brace, 0)
-		}
-
-	case ^ast.Comp_Lit:
-		if v.type != nil {
-			visit_expr(p, v.type)
-		}
-
-		if len(v.elems) != 0 && v.pos.line != v.elems[len(v.elems) - 1].pos.line {
-			visit_begin_brace(p, v.pos, .Comp_Lit, 0)
-			newline_position(p, 1)
-			set_source_position(p, v.elems[0].pos)
-			visit_exprs(p, v.elems, {.Add_Comma, .Trailing})
-			visit_end_brace(p, v.end)
-		} else {
-			push_generic_token(p, .Open_Brace, 0 if v.type != nil else 1)
-			visit_exprs(p, v.elems, {.Add_Comma})
-			push_generic_token(p, .Close_Brace, 0)
-		}
-
-	case ^ast.Unary_Expr:
-		push_generic_token(p, v.op.kind, 1)
-		merge_next_token(p)
-		visit_expr(p, v.expr)
-	case ^ast.Field_Value:
-		visit_expr(p, v.field)
-		push_generic_token(p, .Eq, 1)
-		visit_expr(p, v.value)
-	case ^ast.Type_Assertion:
-		visit_expr(p, v.expr)
-
-		if unary, ok := v.type.derived.(^ast.Unary_Expr); ok && unary.op.text == "?" {
-			push_generic_token(p, .Period, 0)
-			visit_expr(p, v.type)
-		} else {
-			push_generic_token(p, .Period, 0)
-			push_generic_token(p, .Open_Paren, 0)
-			visit_expr(p, v.type)
-			push_generic_token(p, .Close_Paren, 0)
-		}
-
-	case ^ast.Pointer_Type:
-		push_generic_token(p, .Pointer, 1)
-		merge_next_token(p)
-		visit_expr(p, v.elem)
-	case ^ast.Implicit:
-		push_generic_token(p, v.tok.kind, 1)
-	case ^ast.Poly_Type:
-		push_generic_token(p, .Dollar, 1)
-		merge_next_token(p)
-		visit_expr(p, v.type)
-
-		if v.specialization != nil {
-			push_generic_token(p, .Quo, 0)
-			merge_next_token(p)
-			visit_expr(p, v.specialization)
-		}
-	case ^ast.Array_Type:
-		visit_expr(p, v.tag)
-		push_generic_token(p, .Open_Bracket, 1)
-		visit_expr(p, v.len)
-		push_generic_token(p, .Close_Bracket, 0)
-		merge_next_token(p)
-		visit_expr(p, v.elem)
-	case ^ast.Map_Type:
-		push_generic_token(p, .Map, 1)
-		push_generic_token(p, .Open_Bracket, 0)
-		visit_expr(p, v.key)
-		push_generic_token(p, .Close_Bracket, 0)
-		merge_next_token(p)
-		visit_expr(p, v.value)
-	case ^ast.Helper_Type:
-		visit_expr(p, v.type)
-	case ^ast.Multi_Pointer_Type:
-		push_generic_token(p, .Open_Bracket, 1)
-		push_generic_token(p, .Pointer, 0)
-		push_generic_token(p, .Close_Bracket, 0)
-		visit_expr(p, v.elem)
-	case ^ast.Matrix_Type:
-		push_generic_token(p, .Matrix, 1)
-		push_generic_token(p, .Open_Bracket, 0)
-		visit_expr(p, v.row_count)
-		push_generic_token(p, .Comma, 0)
-		visit_expr(p, v.column_count)
-		push_generic_token(p, .Close_Bracket, 0)
-		visit_expr(p, v.elem)
-	case ^ast.Bit_Field_Type:
-		push_generic_token(p, .Bit_Field, 1)
-
-		visit_expr(p, v.backing_type)
-
-		if len(v.fields) == 0 || v.pos.line == v.close.line {
-			push_generic_token(p, .Open_Brace, 1)
-			visit_bit_field_fields(p, v.fields, {.Add_Comma})
-			push_generic_token(p, .Close_Brace, 0)
-		} else {
-			visit_begin_brace(p, v.pos, .Generic, len(v.fields))
-			newline_position(p, 1)
-			set_source_position(p, v.fields[0].pos)
-			visit_bit_field_fields(p, v.fields, {.Add_Comma, .Trailing, .Enforce_Newline})
-			set_source_position(p, v.close)
-			visit_end_brace(p, v.close)
-		}
-
-		set_source_position(p, v.close)
-	case:
-		panic(fmt.aprint(expr.derived))
-	}
-}
-
-visit_begin_brace :: proc(p: ^Printer, begin: tokenizer.Pos, type: Block_Type, count := 0, same_line_spaces_before := 1) {
-	set_source_position(p, begin)
-
-	newline_braced := p.config.brace_style == .Allman
-	newline_braced |= p.config.brace_style == .K_And_R && type == .Proc
-	newline_braced &= p.config.brace_style != ._1TBS
-
-	format_token := Format_Token {
-		kind = .Open_Brace,
-		parameter_count = count,
-		text = "{",
-	}
-
-	if newline_braced {
-		newline_position(p, 1)
-		push_format_token(p, format_token)
-		indent(p)
-	} else {
-		format_token.spaces_before = same_line_spaces_before
-		push_format_token(p, format_token)
-		indent(p)
-	}
-}
-
-visit_end_brace :: proc(p: ^Printer, end: tokenizer.Pos) {
-	move_line(p, end)
-	push_generic_token(p, .Close_Brace, 0)
-	unindent(p)
-	p.current_line.depth = p.depth
-}
-
-visit_block_stmts :: proc(p: ^Printer, stmts: []^ast.Stmt, split := false) {
-	for stmt, i in stmts {
-		visit_stmt(p, stmt, .Generic, false, true)
-
-		if split && i != len(stmts) - 1 && stmt.pos.line == stmts[i + 1].pos.line {
-			newline_position(p, 1)
-		}
-	}
-}
-
-List_Option :: enum u8 {
-	Add_Comma,
-	Trailing,
-	Enforce_Newline,
-	Enforce_Poly_Names,
-}
-
-List_Options :: distinct bit_set[List_Option]
-
-visit_field_list :: proc(p: ^Printer, list: ^ast.Field_List, options := List_Options{}) {
-	if list.list == nil {
-		return
-	}
-
-	for field, i in list.list {
-		if !move_line_limit(p, field.pos, 1) && .Enforce_Newline in options {
-			newline_position(p, 1)
-		}
-
-		if .Using in field.flags {
-			push_generic_token(p, .Using, 1)
-		}
-
-		name_options := List_Options{.Add_Comma}
-		if .Enforce_Poly_Names in options {
-			name_options += {.Enforce_Poly_Names}
-		}
-
-		visit_exprs(p, field.names, name_options)
-
-		if field.type != nil {
-			if len(field.names) != 0 {
-				push_generic_token(p, .Colon, 0)
-			}
-			visit_expr(p, field.type)
-		} else {
-			push_generic_token(p, .Colon, 1)
-			push_generic_token(p, .Eq, 0)
-			visit_expr(p, field.default_value)
-		}
-
-		if field.tag.text != "" {
-			push_generic_token(p, field.tag.kind, 1, field.tag.text)
-		}
-
-		if (i != len(list.list) - 1 || .Trailing in options) && .Add_Comma in options {
-			push_generic_token(p, .Comma, 0)
-		}
-	}
-}
-
-visit_proc_type :: proc(p: ^Printer, proc_type: ^ast.Proc_Type, is_proc_lit := false) {
-	if is_proc_lit {
-		push_format_token(p, Format_Token {
-			kind = .Proc,
-			type = .Proc_Lit,
-			text = "proc",
-			spaces_before = 1,
-		})
-	} else {
-		push_format_token(p, Format_Token {
-			kind = .Proc,
-			text = "proc",
-			spaces_before = 1,
-		})
-	}
-
-	explicit_calling := false
-
-	if v, ok := proc_type.calling_convention.(string); ok {
-		explicit_calling = true
-		push_string_token(p, v, 1)
-	}
-
-	if explicit_calling {
-		push_generic_token(p, .Open_Paren, 1)
-	} else {
-		push_generic_token(p, .Open_Paren, 0)
-	}
-
-	visit_signature_list(p, proc_type.params, false)
-
-	push_generic_token(p, .Close_Paren, 0)
-
-	if proc_type.results != nil {
-		push_generic_token(p, .Sub, 1)
-		push_generic_token(p, .Gt, 0)
-
-		use_parens := false
-
-		if len(proc_type.results.list) > 1 {
-			use_parens = true
-		} else if len(proc_type.results.list) == 1 {
-
-			for name in proc_type.results.list[0].names {
-				if ident, ok := name.derived.(^ast.Ident); ok {
-					if ident.name != "_" {
-						use_parens = true
-					}
-				}
-			}
-		}
-
-		if use_parens {
-			push_generic_token(p, .Open_Paren, 1)
-			visit_signature_list(p, proc_type.results)
-			push_generic_token(p, .Close_Paren, 0)
-		} else {
-			visit_signature_list(p, proc_type.results)
-		}
-	}
-}
-
-visit_binary_expr :: proc(p: ^Printer, binary: ^ast.Binary_Expr) {
-	move_line(p, binary.left.pos)
-
-	if v, ok := binary.left.derived.(^ast.Binary_Expr); ok {
-		visit_binary_expr(p, v)
-	} else {
-		visit_expr(p, binary.left)
-	}
-
-	either_implicit_selector := false
-	if _, lok := binary.left.derived.(^ast.Implicit_Selector_Expr); lok {
-		either_implicit_selector = true
-	} else if _, rok := binary.right.derived.(^ast.Implicit_Selector_Expr); rok {
-		either_implicit_selector = true
-	}
-
-	#partial switch binary.op.kind {
-	case .Ellipsis:
-		push_generic_token(p, binary.op.kind, 1 if either_implicit_selector else 0,
-		                   tokenizer.tokens[tokenizer.Token_Kind.Range_Full])
-	case .Range_Half, .Range_Full:
-		push_generic_token(p, binary.op.kind, 1 if either_implicit_selector else 0)
-	case:
-		push_generic_token(p, binary.op.kind, 1)
-	}
-
-	move_line(p, binary.right.pos)
-
-
-	if v, ok := binary.right.derived.(^ast.Binary_Expr); ok {
-		visit_binary_expr(p, v)
-	} else {
-		visit_expr(p, binary.right)
-	}
-}
-
-visit_call_exprs :: proc(p: ^Printer, list: []^ast.Expr, ellipsis := false) {
-	if len(list) == 0 {
-		return
-	}
-
-	// all the expression are on the line
-	if list[0].pos.line == list[len(list) - 1].pos.line {
-		for expr, i in list {
-			if i == len(list) - 1 && ellipsis {
-				push_generic_token(p, .Ellipsis, 0)
-			}
-
-			visit_expr(p, expr)
-
-			if i != len(list) - 1 {
-				push_generic_token(p, .Comma, 0)
-			}
-		}
-	} else {
-		for expr, i in list {
-			// we have to newline the expressions to respect the source
-			move_line_limit(p, expr.pos, 1)
-
-			if i == len(list) - 1 && ellipsis {
-				push_generic_token(p, .Ellipsis, 0)
-			}
-
-			visit_expr(p, expr)
-
-			if i != len(list) - 1 {
-				push_generic_token(p, .Comma, 0)
-			}
-		}
-	}
-}
-
-visit_signature_list :: proc(p: ^Printer, list: ^ast.Field_List, remove_blank := true) {
-	if list.list == nil {
-		return
-	}
-
-	for field, i in list.list {
-		if i != 0 {
-			move_line_limit(p, field.pos, 1)
-		}
-
-		if .Using in field.flags {
-			push_generic_token(p, .Using, 0)
-		}
-
-		named := false
-
-		for name in field.names {
-			if ident, ok := name.derived.(^ast.Ident); ok {
-				//for some reason the parser uses _ to mean empty
-				if ident.name != "_" || !remove_blank {
-					named = true
-				}
-			} else {
-				//alternative is poly names
-				named = true
-			}
-		}
-
-		if named {
-			visit_exprs(p, field.names, {.Add_Comma})
-
-			if len(field.names) != 0 && field.type != nil {
-				push_generic_token(p, .Colon, 0)
-			}
-		}
-
-		if field.type != nil && field.default_value != nil {
-			visit_expr(p, field.type)
-			push_generic_token(p, .Eq, 1)
-			visit_expr(p, field.default_value)
-		} else if field.type != nil {
-			visit_expr(p, field.type)
-		} else {
-			push_generic_token(p, .Colon, 1)
-			push_generic_token(p, .Eq, 0)
-			visit_expr(p, field.default_value)
-		}
-
-		if i != len(list.list) - 1 {
-			push_generic_token(p, .Comma, 0)
-		}
-	}
-}

+ 0 - 4
examples/all/all_main.odin

@@ -92,9 +92,7 @@ import virtual          "core:mem/virtual"
 
 import ast              "core:odin/ast"
 import doc_format       "core:odin/doc-format"
-import odin_format      "core:odin/format"
 import odin_parser      "core:odin/parser"
-import odin_printer     "core:odin/printer"
 import odin_tokenizer   "core:odin/tokenizer"
 
 import spall            "core:prof/spall"
@@ -209,9 +207,7 @@ _ :: mem
 _ :: virtual
 _ :: ast
 _ :: doc_format
-_ :: odin_format
 _ :: odin_parser
-_ :: odin_printer
 _ :: odin_tokenizer
 _ :: os
 _ :: spall

+ 0 - 12
tests/core/odin/test_parser.odin

@@ -3,9 +3,7 @@ package test_core_odin_parser
 import "core:fmt"
 import "core:odin/ast"
 import "core:odin/parser"
-import "core:odin/printer"
 import "core:os"
-import "core:strings"
 import "core:testing"
 
 
@@ -81,14 +79,4 @@ Foo :: bit_field uint {
 	p := parser.default_parser()
 	ok := parser.parse_file(&p, &file)
 	expect(t, ok == true, "bad parse")
-
-	cfg := printer.default_style
-	cfg.newline_style = .LF
-	print := printer.make_printer(cfg)
-	out := printer.print(&print, &file)
-
-	tsrc := strings.trim_space(file.src)
-	tout := strings.trim_space(out)
-
-	expect(t, tsrc == tout, fmt.tprintf("\n%s\n!=\n%s", tsrc, tout))
 }