
Use `or_break` and `or_continue` where appropriate in the core library

gingerBill · 2 years ago · commit 14adcb9db8
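
For context, here is a minimal sketch (not part of the commit) of what the rewrites below rely on: `or_break` and `or_continue` pop the trailing `bool` (or nil-able error) off a multi-valued expression, bind any remaining values, and `break`/`continue` when that final value signals failure; both accept an optional loop label. The `next_token` helper is hypothetical.

```odin
package main

import "core:fmt"

// Hypothetical helper returning (value, ok), like many core procedures.
next_token :: proc(i: int) -> (tok: string, ok: bool) {
	tokens := []string{"foo", "bar", "baz"}
	if i < len(tokens) {
		return tokens[i], true
	}
	return "", false
}

main :: proc() {
	// Long-hand form, as it looked before this commit:
	i := 0
	for {
		tok, ok := next_token(i)
		if !ok {
			break
		}
		fmt.println(tok)
		i += 1
	}

	// With `or_break`: the trailing bool is consumed and the loop breaks
	// when it is false; `tok` receives the remaining value.
	i = 0
	for {
		tok := next_token(i) or_break
		fmt.println(tok)
		i += 1
	}

	// Both operators also work on a bare boolean and take an optional
	// label, mirroring `in_range or_break scan_loop` in match.odin below.
	scan: for n in 0..<4 {
		tok := next_token(n) or_break scan
		keep := tok != "bar"
		keep or_continue // skip this iteration when `keep` is false
		fmt.println("kept:", tok)
	}
}
```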

+ 4 - 5
core/encoding/json/marshal.odin

@@ -265,9 +265,8 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
 
 			i := 0
 			for bucket_index in 0..<map_cap {
-				if !runtime.map_hash_is_valid(hs[bucket_index]) {
-					continue
-				}
+				runtime.map_hash_is_valid(hs[bucket_index]) or_continue
+
 				opt_write_iteration(w, opt, i) or_return
 				i += 1
 
@@ -284,8 +283,8 @@ marshal_to_writer :: proc(w: io.Writer, v: any, opt: ^Marshal_Options) -> (err:
 					#partial switch info in ti.variant {
 					case runtime.Type_Info_String:
 						switch s in a {
-							case string: name = s
-							case cstring: name = string(s)
+						case string: name = s
+						case cstring: name = string(s)
 						}
 						opt_write_key(w, opt, name) or_return
 

+ 2 - 2
core/encoding/xml/helpers.odin

@@ -21,13 +21,13 @@ find_child_by_ident :: proc(doc: ^Document, parent_id: Element_ID, ident: string
 			/*
 				Skip commments. They have no name.
 			*/
-			if child.kind  != .Element                { continue }
+			if child.kind != .Element { continue }
 
 			/*
 				If the ident matches and it's the nth such child, return it.
 			*/
 			if child.ident == ident {
-				if count == nth                       { return child_id, true }
+				if count == nth { return child_id, true }
 				count += 1
 			}
 		}

+ 3 - 6
core/fmt/fmt.odin

@@ -755,9 +755,8 @@ _parse_int :: proc(s: string, offset: int) -> (result: int, new_offset: int, ok:
 	new_offset = offset
 	for new_offset < len(s) {
 		c := s[new_offset]
-		if !is_digit(c) {
-			break
-		}
+		is_digit(c) or_break
+
 		new_offset += 1
 
 		result *= 10
@@ -2555,9 +2554,7 @@ fmt_value :: proc(fi: ^Info, v: any, verb: rune) {
 			ks, vs, hs, _, _ := runtime.map_kvh_data_dynamic(m^, info.map_info)
 			j := 0
 			for bucket_index in 0..<map_cap {
-				if !runtime.map_hash_is_valid(hs[bucket_index]) {
-					continue
-				}
+				runtime.map_hash_is_valid(hs[bucket_index]) or_continue
 
 				if j > 0 {
 					io.write_string(fi.writer, ", ", &fi.n)

+ 4 - 8
core/math/big/prime.odin

@@ -1214,7 +1214,6 @@ internal_random_prime :: proc(a: ^Int, size_in_bits: int, trials: int, flags :=
 		trials = number_of_rabin_miller_trials(size_in_bits)
 	}
 
-	res: bool
 	RANDOM_PRIME_ITERATIONS_USED = 0
 
 	for {
@@ -1251,11 +1250,7 @@ internal_random_prime :: proc(a: ^Int, size_in_bits: int, trials: int, flags :=
 		/*
 			Is it prime?
 		*/
-		res = internal_int_is_prime(a, trials)                       or_return
-
-		if (!res) {
-			continue
-		}
+		internal_int_is_prime(a, trials) or_return or_continue
 
 		if .Safe in flags {
 			/*
@@ -1267,9 +1262,10 @@ internal_random_prime :: proc(a: ^Int, size_in_bits: int, trials: int, flags :=
 			/*
 				Is it prime?
 			*/
-			res = internal_int_is_prime(a, trials)                   or_return
+			if internal_int_is_prime(a, trials) or_return {
+				break
+			}
 		}
-		if res { break }
 	}
 
 	if .Safe in flags {
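
One note on the prime.odin change above: `internal_int_is_prime(a, trials) or_return or_continue` chains the two operators, so `or_return` first pops the trailing `Error` and propagates it to the caller if it is non-nil, and `or_continue` then consumes the remaining `bool`, skipping the iteration when the candidate is not prime. A rough sketch of the same shape, with a made-up `Error` union and hypothetical `is_even`/`first_even` procedures:

```odin
package main

import "core:fmt"

Parse_Error :: struct { msg: string }
Error :: union { Parse_Error }

// Hypothetical stand-in for a (bool, Error)-returning procedure such as
// internal_int_is_prime: `ok` means "keep it", `err` reports a real failure.
is_even :: proc(n: int) -> (ok: bool, err: Error) {
	if n < 0 {
		return false, Parse_Error{"negative input"}
	}
	return n % 2 == 0, nil
}

first_even :: proc(xs: []int) -> (n: int, err: Error) {
	for x in xs {
		// `or_return` propagates a non-nil error; `or_continue` then
		// consumes the remaining bool and skips this iteration when false.
		is_even(x) or_return or_continue
		return x, nil
	}
	return -1, nil
}

main :: proc() {
	n, err := first_even([]int{3, 5, 8, 9})
	fmt.println(n, err) // prints the first even value, 8
}
```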

+ 17 - 29
core/net/dns.odin

@@ -247,23 +247,19 @@ get_dns_records_from_nameservers :: proc(hostname: string, type: DNS_Record_Type
 		}
 		defer close(conn)
 
-		_, send_err := send(conn, dns_packet[:], name_server)
-		if send_err != nil {
-			continue
-		}
+		_ = send(conn, dns_packet[:], name_server) or_continue
 
-		set_err := set_option(conn, .Receive_Timeout, time.Second * 1)
-		if set_err != nil {
+		if set_option(conn, .Receive_Timeout, time.Second * 1) != nil {
 			return nil, .Connection_Error
 		}
 
-		recv_sz, _, recv_err := recv_udp(conn, dns_response_buf[:])
-		if recv_err == UDP_Recv_Error.Timeout {
-			continue
-		} else if recv_err != nil {
-			continue
-		}
-
+		// recv_sz, _, recv_err := recv_udp(conn, dns_response_buf[:])
+		// if recv_err == UDP_Recv_Error.Timeout {
+		// 	continue
+		// } else if recv_err != nil {
+		// 	continue
+		// }
+		recv_sz, _ := recv_udp(conn, dns_response_buf[:]) or_continue
 		if recv_sz == 0 {
 			continue
 		}
@@ -429,11 +425,9 @@ load_hosts :: proc(hosts_file_path: string, allocator := context.allocator) -> (
 		}
 
 		for hostname in splits[1:] {
-			if len(hostname) == 0 {
-				continue
+			if len(hostname) != 0 {
+				append(&_hosts, DNS_Host_Entry{hostname, addr})
 			}
-
-			append(&_hosts, DNS_Host_Entry{hostname, addr})
 		}
 	}
 
@@ -833,11 +827,9 @@ parse_response :: proc(response: []u8, filter: DNS_Record_Type = nil, allocator
 		}
 
 		rec := parse_record(response, &cur_idx, filter) or_return
-		if rec == nil {
-			continue
+		if rec != nil {
+			append(&_records, rec)
 		}
-
-		append(&_records, rec)
 	}
 
 	for _ in 0..<authority_count {
@@ -846,11 +838,9 @@ parse_response :: proc(response: []u8, filter: DNS_Record_Type = nil, allocator
 		}
 
 		rec := parse_record(response, &cur_idx, filter) or_return
-		if rec == nil {
-			continue
+		if rec != nil {
+			append(&_records, rec)
 		}
-
-		append(&_records, rec)
 	}
 
 	for _ in 0..<additional_count {
@@ -859,11 +849,9 @@ parse_response :: proc(response: []u8, filter: DNS_Record_Type = nil, allocator
 		}
 
 		rec := parse_record(response, &cur_idx, filter) or_return
-		if rec == nil {
-			continue
+		if rec != nil {
+			append(&_records, rec)
 		}
-
-		append(&_records, rec)
 	}
 
 	return _records[:], true

+ 10 - 3
core/net/dns_windows.odin

@@ -45,15 +45,22 @@ _get_dns_records_os :: proc(hostname: string, type: DNS_Record_Type, allocator :
 
 	count := 0
 	for r := rec; r != nil; r = r.pNext {
-		if r.wType != u16(type) do continue // NOTE(tetra): Should never happen, but...
+		if r.wType != u16(type) {
+			// NOTE(tetra): Should never happen, but...
+			continue
+		}
 		count += 1
 	}
 
 	recs := make([dynamic]DNS_Record, 0, count)
-	if recs == nil do return nil, .System_Error // return no results if OOM.
+	if recs == nil {
+		return nil, .System_Error // return no results if OOM.
+	}
 
 	for r := rec; r != nil; r = r.pNext {
-		if r.wType != u16(type) do continue // NOTE(tetra): Should never happen, but...
+		if r.wType != u16(type) {
+			continue // NOTE(tetra): Should never happen, but...
+		}
 
 		base_record := DNS_Record_Base{
 			record_name = strings.clone(string(r.pName)),

+ 2 - 2
core/odin/ast/ast.odin

@@ -551,8 +551,8 @@ unparen_expr :: proc(expr: ^Expr) -> (val: ^Expr) {
 		return
 	}
 	for {
-		e, ok := val.derived.(^Paren_Expr)
-		if !ok || e.expr == nil {
+		e := val.derived.(^Paren_Expr) or_break
+		if e.expr == nil {
 			break
 		}
 		val = e.expr

+ 8 - 25
core/odin/parser/parser.odin

@@ -1053,9 +1053,7 @@ parse_attribute :: proc(p: ^Parser, tok: tokenizer.Token, open_kind, close_kind:
 			}
 			append(&elems, elem)
 
-			if !allow_token(p, .Comma) {
-				break
-			}
+			allow_token(p, .Comma) or_break
 		}
 		p.expr_level -= 1
 		close = expect_token_after(p, close_kind, "attribute")
@@ -1174,9 +1172,7 @@ parse_foreign_decl :: proc(p: ^Parser) -> ^ast.Decl {
 				path := expect_token(p, .String)
 				append(&fullpaths, path.text)
 
-				if !allow_token(p, .Comma) {
-					break
-				}
+				allow_token(p, .Comma) or_break
 			}
 			expect_token(p, .Close_Brace)
 		} else {
@@ -1961,9 +1957,7 @@ parse_field_list :: proc(p: ^Parser, follow: tokenizer.Token_Kind, allowed_flags
 
 		eaf := Expr_And_Flags{param, prefix_flags}
 		append(&list, eaf)
-		if !allow_token(p, .Comma) {
-			break
-		}
+		allow_token(p, .Comma) or_break
 	}
 
 	if p.curr_tok.kind != .Colon {
@@ -2011,10 +2005,7 @@ parse_field_list :: proc(p: ^Parser, follow: tokenizer.Token_Kind, allowed_flags
 			names = parse_ident_list(p, allow_poly_names)
 
 			total_name_count += len(names)
-			ok := handle_field(p, &seen_ellipsis, &fields, docs, names, allowed_flags, set_flags)
-			if !ok {
-				break
-			}
+			handle_field(p, &seen_ellipsis, &fields, docs, names, allowed_flags, set_flags) or_break
 		}
 	}
 
@@ -2361,9 +2352,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
 				elem := parse_expr(p, false)
 				append(&args, elem)
 
-				if !allow_token(p, .Comma) {
-					break
-				}
+				allow_token(p, .Comma) or_break
 			}
 
 			close := expect_token(p, .Close_Brace)
@@ -2696,9 +2685,7 @@ parse_operand :: proc(p: ^Parser, lhs: bool) -> ^ast.Expr {
 			if _, ok := type.derived.(^ast.Bad_Expr); !ok {
 				append(&variants, type)
 			}
-			if !allow_token(p, .Comma) {
-				break
-			}
+			allow_token(p, .Comma) or_break
 		}
 
 		close := expect_closing_brace_of_field_list(p)
@@ -2916,9 +2903,7 @@ parse_elem_list :: proc(p: ^Parser) -> []^ast.Expr {
 
 		append(&elems, elem)
 
-		if !allow_token(p, .Comma) {
-			break
-		}
+		allow_token(p, .Comma) or_break
 	}
 
 	return elems[:]
@@ -2993,9 +2978,7 @@ parse_call_expr :: proc(p: ^Parser, operand: ^ast.Expr) -> ^ast.Expr {
 			seen_ellipsis = true
 		}
 
-		if !allow_token(p, .Comma) {
-			break
-		}
+		allow_token(p, .Comma) or_break
 	}
 
 	close := expect_token_after(p, .Close_Paren, "argument list")

+ 1 - 3
core/path/filepath/match.odin

@@ -99,9 +99,7 @@ scan_chunk :: proc(pattern: string) -> (star: bool, chunk, rest: string) {
 		case ']':
 			in_range = false
 		case '*':
-			if !in_range {
-				break scan_loop
-			}
+			in_range or_break scan_loop
 
 		}
 	}

+ 2 - 3
core/path/filepath/path.odin

@@ -392,9 +392,8 @@ rel :: proc(base_path, target_path: string, allocator := context.allocator) -> (
 		for ti < tl && target[ti] != SEPARATOR {
 			ti += 1
 		}
-		if !strings.equal_fold(target[t0:ti], base[b0:bi]) {
-			break
-		}
+		strings.equal_fold(target[t0:ti], base[b0:bi]) or_break
+
 		if bi < bl {
 			bi += 1
 		}

+ 1 - 4
core/path/slashpath/match.odin

@@ -93,10 +93,7 @@ scan_chunk :: proc(pattern: string) -> (star: bool, chunk, rest: string) {
 		case ']':
 			in_range = false
 		case '*':
-			if !in_range {
-				break scan_loop
-			}
-
+			in_range or_break scan_loop
 		}
 	}
 	return star, pattern[:i], pattern[i:]

+ 3 - 12
core/text/i18n/qt_linguist.odin

@@ -91,10 +91,7 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 		// Find messages in section.
 		nth: int
 		for {
-			message_id, message_found := xml.find_child_by_ident(ts, child_id, "message", nth)
-			if !message_found {
-				break
-			}
+			message_id := xml.find_child_by_ident(ts, child_id, "message", nth) or_break
 
 			numerus_tag, _ := xml.find_attribute_val_by_key(ts, message_id, "numerus")
 			has_plurals := numerus_tag == "yes"
@@ -131,10 +128,7 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 
 				num_plurals: int
 				for {
-					numerus_id, numerus_found := xml.find_child_by_ident(ts, translation_id, "numerusform", num_plurals)
-					if !numerus_found {
-						break
-					}
+					numerus_id := xml.find_child_by_ident(ts, translation_id, "numerusform", num_plurals) or_break
 					num_plurals += 1
 				}
 
@@ -145,10 +139,7 @@ parse_qt_linguist_from_bytes :: proc(data: []byte, options := DEFAULT_PARSE_OPTI
 
 				num_plurals = 0
 				for {
-					numerus_id, numerus_found := xml.find_child_by_ident(ts, translation_id, "numerusform", num_plurals)
-					if !numerus_found {
-						break
-					}
+					numerus_id := xml.find_child_by_ident(ts, translation_id, "numerusform", num_plurals) or_break
 					numerus := get_str(ts.elements[numerus_id].value[0]) or_return
 					numerus, _ = strings.intern_get(&translation.intern, numerus)
 					section[source][num_plurals] = numerus

+ 2 - 3
core/text/match/strlib.odin

@@ -775,10 +775,9 @@ gsub_with :: proc(
 	haystack := haystack
 
 	for {
-		length, err := find_aux(haystack, pattern, 0, false, &captures)
-
+		length := find_aux(haystack, pattern, 0, false, &captures) or_break
 		// done
-		if length == 0 || err != .OK {
+		if length == 0 {
 			break
 		}
 

+ 33 - 37
core/unicode/tools/generate_entity_table.odin

@@ -86,44 +86,40 @@ generate_encoding_entity_table :: proc() {
 
 			nth := 0
 			for {
-				character_entity, entity_ok := xml.find_child_by_ident(char, "entity", nth)
-				if !entity_ok { break }
-
-				nth   += 1
-				if name, name_ok := xml.find_attribute_val_by_key(character_entity, "id"); name_ok {
-
-					if len(name) == 0 {
-						/*
-							Invalid name. Skip.
-						*/
-						continue
-					}
-
-					if name == "\"\"" {
-						printf("%#v\n", char)
-						printf("%#v\n", character_entity)
-					}
-
-					if len(name) > max_name_length { longest_name  = name }
-					if len(name) < min_name_length { shortest_name = name }
-
-					min_name_length = min(min_name_length, len(name))
-					max_name_length = max(max_name_length, len(name))
-
-					e := Entity{
-						name        = name,
-						codepoint   = rune(codepoint),
-						description = description,
-					}
-
-					if _, seen := entity_map[name]; seen {
-						continue
-					}
-
-					entity_map[name] = e
-					append(&names, name)
-					count += 1
+				character_entity := xml.find_child_by_ident(char, "entity", nth) or_break
+				nth += 1
+				name := xml.find_attribute_val_by_key(character_entity, "id") or_continue
+				if len(name) == 0 {
+					/*
+						Invalid name. Skip.
+					*/
+					continue
 				}
+
+				if name == "\"\"" {
+					printf("%#v\n", char)
+					printf("%#v\n", character_entity)
+				}
+
+				if len(name) > max_name_length { longest_name  = name }
+				if len(name) < min_name_length { shortest_name = name }
+
+				min_name_length = min(min_name_length, len(name))
+				max_name_length = max(max_name_length, len(name))
+
+				e := Entity{
+					name        = name,
+					codepoint   = rune(codepoint),
+					description = description,
+				}
+
+				if name in entity_map {
+					continue
+				}
+
+				entity_map[name] = e
+				append(&names, name)
+				count += 1
 			}
 		}
 	}