
Commit fix

gingerBill committed 5 years ago
commit 5fa54fd2cc

core/container/set.odin (+1 -1)

@@ -57,7 +57,7 @@ set_add :: proc(m: ^Set, key: u64) {
 		_set_grow(m);
 	}
 
-	i := _set_find_or_make(m, key);
+	_ = _set_find_or_make(m, key);
 	if _set_full(m^) {
 		_set_grow(m);
 	}
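
As an aside, not part of the commit: _ = is Odin's blank identifier and explicitly discards a return value instead of binding it to an unused local. A minimal sketch, with a made-up procedure noisy purely for illustration:

noisy :: proc() -> int {
	return 42;
}

discard_example :: proc() {
	_ = noisy(); // explicitly ignore the result, as set_add now does with _set_find_or_make
}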

core/container/small_array.odin (+0 -2)

@@ -1,7 +1,5 @@
 package container
 
-import "core:mem"
-
 Small_Array :: struct(N: int, T: typeid) where N >= 0 {
 	data: [N]T,
 	len:  int,
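
For reference, not part of the commit: Small_Array is a parametrically polymorphic struct whose two fields are shown above. A hedged usage sketch, with illustrative names only:

small_array_example :: proc() {
	buf: Small_Array(8, int); // capacity 8, element type int
	buf.data[0] = 1;          // fields exactly as declared above
	buf.len = 1;
}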

core/encoding/json/marshal.odin (+30 -14)

@@ -10,6 +10,7 @@ import "core:reflect"
 Marshal_Error :: enum {
 	None,
 	Unsupported_Type,
+	Invalid_Data,
 }
 
 marshal :: proc(v: any, allocator := context.allocator) -> ([]byte, Marshal_Error) {
@@ -17,7 +18,7 @@ marshal :: proc(v: any, allocator := context.allocator) -> ([]byte, Marshal_Erro
 
 	err := marshal_arg(&b, v);
 
-	if err != Marshal_Error.None {
+	if err != .None {
 		strings.destroy_builder(&b);
 		return nil, err;
 	}
@@ -34,15 +35,15 @@ marshal_arg :: proc(b: ^strings.Builder, v: any) -> Marshal_Error {
 	using runtime;
 	if v == nil {
 		write_string(b, "null");
-		return Marshal_Error.None;
+		return .None;
 	}
 
 	ti := type_info_base(type_info_of(v.id));
 	a := any{v.data, ti.id};
 
-	#partial switch info in ti.variant {
+	switch info in ti.variant {
 	case Type_Info_Named:
-		panic("Unreachable");
+		unreachable();
 
 	case Type_Info_Integer:
 		buf: [21]byte;
@@ -108,7 +109,10 @@ marshal_arg :: proc(b: ^strings.Builder, v: any) -> Marshal_Error {
 		write_string(b, string(s));
 
 	case Type_Info_Complex:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
+
+	case Type_Info_Quaternion:
+		return .Unsupported_Type;
 
 	case Type_Info_String:
 		switch s in a {
@@ -128,19 +132,31 @@ marshal_arg :: proc(b: ^strings.Builder, v: any) -> Marshal_Error {
 		write_string(b, val ? "true" : "false");
 
 	case Type_Info_Any:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
 
 	case Type_Info_Type_Id:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
 
 	case Type_Info_Pointer:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
 
 	case Type_Info_Procedure:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
 
 	case Type_Info_Tuple:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
+
+	case Type_Info_Enumerated_Array:
+		return .Unsupported_Type;
+
+	case Type_Info_Simd_Vector:
+		return .Unsupported_Type;
+
+	case Type_Info_Relative_Pointer:
+		return .Unsupported_Type;
+
+	case Type_Info_Relative_Slice:
+		return .Unsupported_Type;
 
 	case Type_Info_Array:
 		write_byte(b, '[');
@@ -180,7 +196,7 @@ marshal_arg :: proc(b: ^strings.Builder, v: any) -> Marshal_Error {
 		write_byte(b, '{');
 		if m != nil {
 			if info.generated_struct == nil {
-				return Marshal_Error.Unsupported_Type;
+				return .Unsupported_Type;
 			}
 			entries    := &m.entries;
 			gs         := type_info_base(info.generated_struct).variant.(Type_Info_Struct);
@@ -320,11 +336,11 @@ marshal_arg :: proc(b: ^strings.Builder, v: any) -> Marshal_Error {
 		write_u64(b, bit_data);
 
 
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
 
 	case Type_Info_Opaque:
-		return Marshal_Error.Unsupported_Type;
+		return .Unsupported_Type;
 	}
 
-	return Marshal_Error.None;
+	return .None;
 }
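
As an illustrative sketch, not part of the commit: calling marshal from user code with the implicit selector syntax (.None) that this change adopts. The value being marshalled and the delete call are assumptions, not taken from the diff:

import "core:encoding/json"

marshal_example :: proc() {
	value := 123;
	data, err := json.marshal(value); // signature shown in the hunk above
	if err != .None {
		return;
	}
	defer delete(data); // assumed: the returned []byte was allocated with the given allocator
	// use data (the JSON text as bytes) here
}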

core/encoding/json/parser.odin (+45 -44)

@@ -5,11 +5,12 @@ import "core:unicode/utf8"
 import "core:strconv"
 
 Parser :: struct {
-	tok:        Tokenizer,
-	prev_token: Token,
-	curr_token: Token,
-	spec:       Specification,
-	allocator:  mem.Allocator,
+	tok:            Tokenizer,
+	prev_token:     Token,
+	curr_token:     Token,
+	spec:           Specification,
+	allocator:      mem.Allocator,
+	unmarshal_data: any,
 }
 
 make_parser :: proc(data: []byte, spec := Specification.JSON, allocator := context.allocator) -> Parser {
@@ -46,7 +47,7 @@ advance_token :: proc(p: ^Parser) -> (Token, Error) {
 }
 
 
-allow_token :: proc(p: ^Parser, kind: Kind) -> bool {
+allow_token :: proc(p: ^Parser, kind: Token_Kind) -> bool {
 	if p.curr_token.kind == kind {
 		advance_token(p);
 		return true;
@@ -54,13 +55,13 @@ allow_token :: proc(p: ^Parser, kind: Kind) -> bool {
 	return false;
 }
 
-expect_token :: proc(p: ^Parser, kind: Kind) -> Error {
+expect_token :: proc(p: ^Parser, kind: Token_Kind) -> Error {
 	prev := p.curr_token;
 	advance_token(p);
 	if prev.kind == kind {
-		return Error.None;
+		return .None;
 	}
-	return Error.Unexpected_Token;
+	return .Unexpected_Token;
 }
 
 
@@ -71,44 +72,44 @@ parse_value :: proc(p: ^Parser) -> (value: Value, err: Error) {
 
 	token := p.curr_token;
 	#partial switch token.kind {
-	case Kind.Null:
+	case .Null:
 		value.value = Null{};
 		advance_token(p);
 		return;
-	case Kind.False:
+	case .False:
 		value.value = Boolean(false);
 		advance_token(p);
 		return;
-	case Kind.True:
+	case .True:
 		value.value = Boolean(true);
 		advance_token(p);
 		return;
 
-	case Kind.Integer:
+	case .Integer:
 		i, _ := strconv.parse_i64(token.text);
 		value.value = Integer(i);
 		advance_token(p);
 		return;
-	case Kind.Float:
+	case .Float:
 		f, _ := strconv.parse_f64(token.text);
 		value.value = Float(f);
 		advance_token(p);
 		return;
-	case Kind.String:
+	case .String:
 		value.value = String(unquote_string(token, p.spec, p.allocator));
 		advance_token(p);
 		return;
 
-	case Kind.Open_Brace:
+	case .Open_Brace:
 		return parse_object(p);
 
-	case Kind.Open_Bracket:
+	case .Open_Bracket:
 		return parse_array(p);
 
 	case:
 		if p.spec == Specification.JSON5 {
 			#partial switch token.kind {
-			case Kind.Infinity:
+			case .Infinity:
 				inf: u64 = 0x7ff0000000000000;
 				if token.text[0] == '-' {
 					inf = 0xfff0000000000000;
@@ -116,7 +117,7 @@ parse_value :: proc(p: ^Parser) -> (value: Value, err: Error) {
 				value.value = transmute(f64)inf;
 				advance_token(p);
 				return;
-			case Kind.NaN:
+			case .NaN:
 				nan: u64 = 0x7ff7ffffffffffff;
 				if token.text[0] == '-' {
 					nan = 0xfff7ffffffffffff;
@@ -128,7 +129,7 @@ parse_value :: proc(p: ^Parser) -> (value: Value, err: Error) {
 		}
 	}
 
-	err = Error.Unexpected_Token;
+	err = .Unexpected_Token;
 	advance_token(p);
 	return;
 }
@@ -136,36 +137,36 @@ parse_value :: proc(p: ^Parser) -> (value: Value, err: Error) {
 parse_array :: proc(p: ^Parser) -> (value: Value, err: Error) {
 	value.pos = p.curr_token.pos;
 	defer value.end = token_end_pos(p.prev_token);
-	if err = expect_token(p, Kind.Open_Bracket); err != Error.None {
+	if err = expect_token(p, .Open_Bracket); err != .None {
 		return;
 	}
 
 	array: Array;
 	array.allocator = p.allocator;
-	defer if err != Error.None {
+	defer if err != .None {
 		for elem in array {
 			destroy_value(elem);
 		}
 		delete(array);
 	}
 
-	for p.curr_token.kind != Kind.Close_Bracket {
+	for p.curr_token.kind != .Close_Bracket {
 		elem, elem_err := parse_value(p);
-		if elem_err != Error.None {
+		if elem_err != .None {
 			err = elem_err;
 			return;
 		}
 		append(&array, elem);
 
 		// Disallow trailing commas for the time being
-		if allow_token(p, Kind.Comma) {
+		if allow_token(p, .Comma) {
 			continue;
 		} else {
 			break;
 		}
 	}
 
-	if err = expect_token(p, Kind.Close_Bracket); err != Error.None {
+	if err = expect_token(p, .Close_Bracket); err != .None {
 		return;
 	}
 
@@ -184,18 +185,18 @@ clone_string :: proc(s: string, allocator: mem.Allocator) -> string {
 parse_object_key :: proc(p: ^Parser) -> (key: string, err: Error) {
 	tok := p.curr_token;
 	if p.spec == Specification.JSON5 {
-		if tok.kind == Kind.String {
-			expect_token(p, Kind.String);
+		if tok.kind == .String {
+			expect_token(p, .String);
 			key = unquote_string(tok, p.spec, p.allocator);
 			return;
-		} else if tok.kind == Kind.Ident {
-			expect_token(p, Kind.Ident);
+		} else if tok.kind == .Ident {
+			expect_token(p, .Ident);
 			key = clone_string(tok.text, p.allocator);
 			return;
 		}
 	}
-	if tok_err := expect_token(p, Kind.String); tok_err != Error.None {
-		err = Error.Expected_String_For_Object_Key;
+	if tok_err := expect_token(p, .String); tok_err != .None {
+		err = .Expected_String_For_Object_Key;
 		return;
 	}
 	key = unquote_string(tok, p.spec, p.allocator);
@@ -206,14 +207,14 @@ parse_object :: proc(p: ^Parser) -> (value: Value, err: Error) {
 	value.pos = p.curr_token.pos;
 	defer value.end = token_end_pos(p.prev_token);
 
-	if err = expect_token(p, Kind.Open_Brace); err != Error.None {
+	if err = expect_token(p, .Open_Brace); err != .None {
 		value.pos = p.curr_token.pos;
 		return;
 	}
 
 	obj: Object;
 	obj.allocator = p.allocator;
-	defer if err != Error.None {
+	defer if err != .None {
 		for key, elem in obj {
 			delete(key, p.allocator);
 			destroy_value(elem);
@@ -221,30 +222,30 @@ parse_object :: proc(p: ^Parser) -> (value: Value, err: Error) {
 		delete(obj);
 	}
 
-	for p.curr_token.kind != Kind.Close_Brace {
+	for p.curr_token.kind != .Close_Brace {
 		key: string;
 		key, err = parse_object_key(p);
-		if err != Error.None {
+		if err != .None {
 			delete(key, p.allocator);
 			value.pos = p.curr_token.pos;
 			return;
 		}
 
-		if colon_err := expect_token(p, Kind.Colon); colon_err != Error.None {
-			err = Error.Expected_Colon_After_Key;
+		if colon_err := expect_token(p, .Colon); colon_err != .None {
+			err = .Expected_Colon_After_Key;
 			value.pos = p.curr_token.pos;
 			return;
 		}
 
 		elem, elem_err := parse_value(p);
-		if elem_err != Error.None {
+		if elem_err != .None {
 			err = elem_err;
 			value.pos = p.curr_token.pos;
 			return;
 		}
 
 		if key in obj {
-			err = Error.Duplicate_Object_Key;
+			err = .Duplicate_Object_Key;
 			value.pos = p.curr_token.pos;
 			delete(key, p.allocator);
 			return;
@@ -254,12 +255,12 @@ parse_object :: proc(p: ^Parser) -> (value: Value, err: Error) {
 
 		if p.spec == Specification.JSON5 {
 			// Allow trailing commas
-			if allow_token(p, Kind.Comma) {
+			if allow_token(p, .Comma) {
 				continue;
 			}
 		} else {
 			// Disallow trailing commas
-			if allow_token(p, Kind.Comma) {
+			if allow_token(p, .Comma) {
 				continue;
 			} else {
 				break;
@@ -267,7 +268,7 @@ parse_object :: proc(p: ^Parser) -> (value: Value, err: Error) {
 		}
 	}
 
-	if err = expect_token(p, Kind.Close_Brace); err != Error.None {
+	if err = expect_token(p, .Close_Brace); err != .None {
 		value.pos = p.curr_token.pos;
 		return;
 	}
@@ -316,7 +317,7 @@ unquote_string :: proc(token: Token, spec: Specification, allocator := context.a
 		return r;
 	}
 
-	if token.kind != Kind.String {
+	if token.kind != .String {
 		return "";
 	}
 	s := token.text;
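
A hedged sketch, not part of the commit, of driving the parser entry points shown above from code in the same package; error checks use the implicit selector form this change switches to:

parse_example :: proc(data: []byte) {
	p := make_parser(data, Specification.JSON5); // signature shown above
	value, err := parse_value(&p);
	if err != .None {
		return;
	}
	defer destroy_value(value); // destroy_value is the cleanup call used in the hunks above
	// inspect value here
}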

core/encoding/json/tokenizer.odin (+47 -45)

@@ -4,12 +4,13 @@ import "core:unicode/utf8"
 
 Token :: struct {
 	using pos: Pos,
-	kind: Kind,
+	kind: Token_Kind,
 	text: string,
 }
 
-Kind :: enum {
+Token_Kind :: enum {
 	Invalid,
+	EOF,
 
 	Null,
 	False,
@@ -122,7 +123,7 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 				t.pos.column = 1;
 				next_rune(t);
 			case:
-				if t.spec == Specification.JSON5 {
+				if t.spec == .JSON5 {
 					switch t.r {
 					case 0x2028, 0x2029, 0xFEFF:
 						next_rune(t);
@@ -159,36 +160,37 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 
 	token.pos = t.pos;
 
-	token.kind = Kind.Invalid;
+	token.kind = .Invalid;
 
 	curr_rune := t.r;
 	next_rune(t);
 
 	block: switch curr_rune {
 	case utf8.RUNE_ERROR:
-		err = Error.Illegal_Character;
+		err = .Illegal_Character;
 	case utf8.RUNE_EOF, '\x00':
-		err = Error.EOF;
+		token.kind = .EOF;
+		err = .EOF;
 
 	case 'A'..'Z', 'a'..'z', '_':
-		token.kind = Kind.Ident;
+		token.kind = .Ident;
 
 		skip_alphanum(t);
 
 		switch str := string(t.data[token.offset:t.offset]); str {
-		case "null":  token.kind = Kind.Null;
-		case "false": token.kind = Kind.False;
-		case "true":  token.kind = Kind.True;
+		case "null":  token.kind = .Null;
+		case "false": token.kind = .False;
+		case "true":  token.kind = .True;
 		case:
-			if t.spec == Specification.JSON5 do switch str {
-			case "Infinity": token.kind = Kind.Infinity;
-			case "NaN":      token.kind = Kind.NaN;
+			if t.spec == .JSON5 do switch str {
+			case "Infinity": token.kind = .Infinity;
+			case "NaN":      token.kind = .NaN;
 			}
 		}
 
 	case '+':
-		err = Error.Illegal_Character;
-		if t.spec != Specification.JSON5 {
+		err = .Illegal_Character;
+		if t.spec != .JSON5 {
 			break;
 		}
 		fallthrough;
@@ -199,15 +201,15 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 			// Okay
 		case:
 			// Illegal use of +/-
-			err = Error.Illegal_Character;
+			err = .Illegal_Character;
 
-			if t.spec == Specification.JSON5 {
+			if t.spec == .JSON5 {
 				if t.r == 'I' || t.r == 'N' {
 					skip_alphanum(t);
 				}
 				switch string(t.data[token.offset:t.offset]) {
-				case "-Infinity": token.kind = Kind.Infinity;
-				case "-NaN":      token.kind = Kind.NaN;
+				case "-Infinity": token.kind = .Infinity;
+				case "-NaN":      token.kind = .NaN;
 				}
 			}
 			break block;
@@ -215,8 +217,8 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 		fallthrough;
 
 	case '0'..'9':
-		token.kind = Kind.Integer;
-		if t.spec == Specification.JSON5 { // Hexadecimal Numbers
+		token.kind = .Integer;
+		if t.spec == .JSON5 { // Hexadecimal Numbers
 			if curr_rune == '0' && (t.r == 'x' || t.r == 'X') {
 				next_rune(t);
 				skip_hex_digits(t);
@@ -227,7 +229,7 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 		skip_digits(t);
 
 		if t.r == '.' {
-			token.kind = Kind.Float;
+			token.kind = .Float;
 			next_rune(t);
 			skip_digits(t);
 		}
@@ -241,12 +243,12 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 
 		str := string(t.data[token.offset:t.offset]);
 		if !is_valid_number(str, t.spec) {
-			err = Error.Invalid_Number;
+			err = .Invalid_Number;
 		}
 
 	case '.':
-		err = Error.Illegal_Character;
-		if t.spec == Specification.JSON5 { // Allow leading decimal point
+		err = .Illegal_Character;
+		if t.spec == .JSON5 { // Allow leading decimal point
 			skip_digits(t);
 			if t.r == 'e' || t.r == 'E' {
 				switch r := next_rune(t); r {
@@ -257,24 +259,24 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 			}
 			str := string(t.data[token.offset:t.offset]);
 			if !is_valid_number(str, t.spec) {
-				err = Error.Invalid_Number;
+				err = .Invalid_Number;
 			}
 		}
 
 
 	case '\'':
-		err = Error.Illegal_Character;
-		if t.spec != Specification.JSON5 {
+		err = .Illegal_Character;
+		if t.spec != .JSON5 {
 			break;
 		}
 		fallthrough;
 	case '"':
-		token.kind = Kind.String;
+		token.kind = .String;
 		quote := curr_rune;
 		for t.offset < len(t.data) {
 			r := t.r;
 			if r == '\n' || r < 0 {
-				err = Error.String_Not_Terminated;
+				err = .String_Not_Terminated;
 				break;
 			}
 			next_rune(t);
@@ -288,20 +290,20 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 
 		str := string(t.data[token.offset : t.offset]);
 		if !is_valid_string_literal(str, t.spec) {
-			err = Error.Invalid_String;
+			err = .Invalid_String;
 		}
 
 
-	case ',': token.kind = Kind.Comma;
-	case ':': token.kind = Kind.Colon;
-	case '{': token.kind = Kind.Open_Brace;
-	case '}': token.kind = Kind.Close_Brace;
-	case '[': token.kind = Kind.Open_Bracket;
-	case ']': token.kind = Kind.Close_Bracket;
+	case ',': token.kind = .Comma;
+	case ':': token.kind = .Colon;
+	case '{': token.kind = .Open_Brace;
+	case '}': token.kind = .Close_Brace;
+	case '[': token.kind = .Open_Bracket;
+	case ']': token.kind = .Close_Bracket;
 
 	case '/':
-		err = Error.Illegal_Character;
-		if t.spec == Specification.JSON5 {
+		err = .Illegal_Character;
+		if t.spec == .JSON5 {
 			switch t.r {
 			case '/':
 				// Single-line comments
@@ -319,11 +321,11 @@ get_token :: proc(t: ^Tokenizer) -> (token: Token, err: Error) {
 						}
 					}
 				}
-				err = Error.EOF;
+				err = .EOF;
 			}
 		}
 
-	case: err = Error.Illegal_Character;
+	case: err = .Illegal_Character;
 	}
 
 	token.text = string(t.data[token.offset : t.offset]);
@@ -344,7 +346,7 @@ is_valid_number :: proc(str: string, spec: Specification) -> bool {
 		if s == "" {
 			return false;
 		}
-	} else if spec == Specification.JSON5 {
+	} else if spec == .JSON5 {
 		if s[0] == '+' { // Allow positive sign
 			s = s[1:];
 			if s == "" {
@@ -360,7 +362,7 @@ is_valid_number :: proc(str: string, spec: Specification) -> bool {
 		s = s[1:];
 		for len(s) > 0 && '0' <= s[0] && s[0] <= '9' do s = s[1:];
 	case '.':
-		if spec == Specification.JSON5 { // Allow leading decimal point
+		if spec == .JSON5 { // Allow leading decimal point
 			s = s[1:];
 		} else {
 			return false;
@@ -369,7 +371,7 @@ is_valid_number :: proc(str: string, spec: Specification) -> bool {
 		return false;
 	}
 
-	if spec == Specification.JSON5 {
+	if spec == .JSON5 {
 		if len(s) == 1 && s[0] == '.' { // Allow trailing decimal point
 			return true;
 		}
@@ -406,7 +408,7 @@ is_valid_string_literal :: proc(str: string, spec: Specification) -> bool {
 		return false;
 	}
 	if s[0] != '"' || s[len(s)-1] != '"' {
-		if spec == Specification.JSON5 {
+		if spec == .JSON5 {
 			if s[0] != '\'' || s[len(s)-1] != '\'' {
 				return false;
 			}
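
A hedged sketch, not part of the commit, of how a caller can use the Token_Kind.EOF value added above; t is assumed to be an already-initialized Tokenizer, since its setup is not shown in this diff:

scan_all :: proc(t: ^Tokenizer) {
	for {
		tok, err := get_token(t);
		if err != .None || tok.kind == .EOF { // .EOF is the new Token_Kind value
			break;
		}
		// inspect tok here
	}
}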

core/encoding/json/types.odin (+2 -1)

@@ -2,7 +2,8 @@ package json
 
 Specification :: enum {
 	JSON,
-	JSON5,
+	JSON5, // https://json5.org/
+	// MJSON, // http://bitsquid.blogspot.com/2009/09/json-configuration-data.html
 }
 
 Null    :: distinct rawptr;

core/encoding/json/validator.odin (+15 -15)

@@ -14,27 +14,27 @@ is_valid :: proc(data: []byte, spec := Specification.JSON) -> bool {
 validate_object_key :: proc(p: ^Parser) -> bool {
 	tok := p.curr_token;
 	if p.spec == Specification.JSON5 {
-		if tok.kind == Kind.String {
-			expect_token(p, Kind.String);
+		if tok.kind == .String {
+			expect_token(p, .String);
 			return true;
-		} else if tok.kind == Kind.Ident {
-			expect_token(p, Kind.Ident);
+		} else if tok.kind == .Ident {
+			expect_token(p, .Ident);
 			return true;
 		}
 	}
-	err := expect_token(p, Kind.String);
+	err := expect_token(p, .String);
 	return err == Error.None;
 }
 validate_object :: proc(p: ^Parser) -> bool {
-	if err := expect_token(p, Kind.Open_Brace); err != Error.None {
+	if err := expect_token(p, .Open_Brace); err != Error.None {
 		return false;
 	}
 
-	for p.curr_token.kind != Kind.Close_Brace {
+	for p.curr_token.kind != .Close_Brace {
 		if !validate_object_key(p) {
 			return false;
 		}
-		if colon_err := expect_token(p, Kind.Colon); colon_err != Error.None {
+		if colon_err := expect_token(p, .Colon); colon_err != Error.None {
 			return false;
 		}
 
@@ -44,12 +44,12 @@ validate_object :: proc(p: ^Parser) -> bool {
 
 		if p.spec == Specification.JSON5 {
 			// Allow trailing commas
-			if allow_token(p, Kind.Comma) {
+			if allow_token(p, .Comma) {
 				continue;
 			}
 		} else {
 			// Disallow trailing commas
-			if allow_token(p, Kind.Comma) {
+			if allow_token(p, .Comma) {
 				continue;
 			} else {
 				break;
@@ -57,31 +57,31 @@ validate_object :: proc(p: ^Parser) -> bool {
 		}
 	}
 
-	if err := expect_token(p, Kind.Close_Brace); err != Error.None {
+	if err := expect_token(p, .Close_Brace); err != Error.None {
 		return false;
 	}
 	return true;
 }
 
 validate_array :: proc(p: ^Parser) -> bool {
-	if err := expect_token(p, Kind.Open_Bracket); err != Error.None {
+	if err := expect_token(p, .Open_Bracket); err != Error.None {
 		return false;
 	}
 
-	for p.curr_token.kind != Kind.Close_Bracket {
+	for p.curr_token.kind != .Close_Bracket {
 		if !validate_value(p) {
 			return false;
 		}
 
 		// Disallow trailing commas for the time being
-		if allow_token(p, Kind.Comma) {
+		if allow_token(p, .Comma) {
 			continue;
 		} else {
 			break;
 		}
 	}
 
-	if err := expect_token(p, Kind.Close_Bracket); err != Error.None {
+	if err := expect_token(p, .Close_Bracket); err != Error.None {
 		return false;
 	}
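
As a usage sketch, not part of the commit: is_valid can be pointed at JSON5 input via the implicit selector form, assuming code in the same package. The sample document and the string-to-bytes transmute are illustrative:

validate_example :: proc() -> bool {
	text := `{key: "value",}`; // JSON5 permits unquoted keys and trailing commas
	return is_valid(transmute([]byte)text, .JSON5); // signature shown above
}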