Better error messages for import cycles

Ginger Bill, 7 years ago
parent commit 56a98a483f
5 changed files with 246 additions and 56 deletions
  1. core/os_x.odin    (+1, -1)
  2. core/types.odin   (+160, -0)
  3. src/checker.cpp   (+51, -48)
  4. src/parser.cpp    (+16, -7)
  5. src/types.cpp     (+18, -0)

+ 1 - 1
core/os_x.odin

@@ -233,7 +233,7 @@ access :: proc(path: string, mask: int) -> bool #inline {
 
 heap_alloc :: proc(size: int) -> rawptr #inline {
 	assert(size > 0);
-	return __unix_calloc(1, size);
+	return _unix_calloc(1, size);
 }
 heap_resize :: proc(ptr: rawptr, new_size: int) -> rawptr #inline {
 	return _unix_realloc(ptr, new_size);

+ 160 - 0
core/types.odin

@@ -1,3 +1,163 @@
+are_types_identical :: proc(a, b: ^Type_Info) -> bool {
+	if a == b do return true;
+
+	if (a == nil && b != nil) ||
+	   (a != nil && b == nil) {
+		return false;
+	}
+
+
+	switch {
+	case a.size != b.size, a.align != b.align:
+		return false;
+	}
+
+	switch x in a.variant {
+	case Type_Info_Named:
+		y, ok := b.variant.(Type_Info_Named);
+		if !ok do return false;
+		return x.base == y.base;
+
+	case Type_Info_Integer:
+		y, ok := b.variant.(Type_Info_Integer);
+		if !ok do return false;
+		return x.signed == y.signed;
+
+	case Type_Info_Rune:
+		_, ok := b.variant.(Type_Info_Rune);
+		return ok;
+
+	case Type_Info_Float:
+		_, ok := b.variant.(Type_Info_Float);
+		return ok;
+
+	case Type_Info_Complex:
+		_, ok := b.variant.(Type_Info_Complex);
+		return ok;
+
+	case Type_Info_String:
+		_, ok := b.variant.(Type_Info_String);
+		return ok;
+
+	case Type_Info_Boolean:
+		_, ok := b.variant.(Type_Info_Boolean);
+		return ok;
+
+	case Type_Info_Any:
+		_, ok := b.variant.(Type_Info_Any);
+		return ok;
+
+	case Type_Info_Pointer:
+		y, ok := b.variant.(Type_Info_Pointer);
+		return are_types_identical(x.elem, y.elem);
+
+	case Type_Info_Procedure:
+		y, ok := b.variant.(Type_Info_Procedure);
+		if !ok do return false;
+		switch {
+		case x.variadic   != y.variadic,
+		     x.convention != y.convention:
+			return false;
+		}
+
+		return are_types_identical(x.params, y.params) && are_types_identical(x.results, y.results);
+
+	case Type_Info_Array:
+		y, ok := b.variant.(Type_Info_Array);
+		if !ok do return false;
+		if x.count != y.count do return false;
+		return are_types_identical(x.elem, y.elem);
+
+	case Type_Info_Dynamic_Array:
+		y, ok := b.variant.(Type_Info_Dynamic_Array);
+		if !ok do return false;
+		return are_types_identical(x.elem, y.elem);
+
+	case Type_Info_Slice:
+		y, ok := b.variant.(Type_Info_Slice);
+		if !ok do return false;
+		return are_types_identical(x.elem, y.elem);
+
+	case Type_Info_Vector:
+		y, ok := b.variant.(Type_Info_Vector);
+		if !ok do return false;
+		if x.count != y.count do return false;
+		return are_types_identical(x.elem, y.elem);
+
+	case Type_Info_Tuple:
+		y, ok := b.variant.(Type_Info_Tuple);
+		if !ok do return false;
+		if len(x.types) != len(y.types) do return false;
+		for _, i in x.types {
+			xt, yt := x.types[i], y.types[i];
+			if !are_types_identical(xt, yt) {
+				return false;
+			}
+		}
+		return true;
+
+	case Type_Info_Struct:
+		y, ok := b.variant.(Type_Info_Struct);
+		if !ok do return false;
+	   	switch {
+		case len(x.types)   != len(y.types),
+		     x.is_packed    != y.is_packed,
+		     x.is_ordered   != y.is_ordered,
+		     x.is_raw_union != y.is_raw_union,
+		     x.custom_align != y.custom_align:
+		     return false;
+		}
+		for _, i in x.types {
+			xn, yn := x.names[i], y.names[i];
+			xt, yt := x.types[i], y.types[i];
+
+			if xn != yn do return false;
+			if !are_types_identical(xt, yt) do return false;
+		}
+		return true;
+
+	case Type_Info_Union:
+		y, ok := b.variant.(Type_Info_Union);
+		if !ok do return false;
+		if len(x.variants) != len(y.variants) do return false;
+
+		for _, i in x.variants {
+			xv, yv := x.variants[i], y.variants[i];
+			if !are_types_identical(xv, yv) do return false;
+		}
+		return true;
+
+	case Type_Info_Enum:
+		// NOTE(bill): Should be handled above
+		return false;
+
+	case Type_Info_Map:
+		y, ok := b.variant.(Type_Info_Map);
+		if !ok do return false;
+		return are_types_identical(x.key, y.key) && are_types_identical(x.value, y.value);
+
+	case Type_Info_Bit_Field:
+		y, ok := b.variant.(Type_Info_Bit_Field);
+		if !ok do return false;
+		if len(x.names) != len(y.names) do return false;
+
+		for _, i in x.names {
+			xb, yb := x.bits[i], y.bits[i];
+			xo, yo := x.offsets[i], y.offsets[i];
+			xn, yn := x.names[i], y.names[i];
+
+			if xb != yb do return false;
+			if xo != yo do return false;
+			if xn != yn do return false;
+		}
+
+		return true;
+	}
+
+	return false;
+}
+
+
 is_signed :: proc(info: ^Type_Info) -> bool {
 	if info == nil do return false;
 	switch i in type_info_base(info).variant {

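Note: the new are_types_identical in core/types.odin walks two ^Type_Info values structurally, with named types compared by their base pointers. A minimal usage sketch follows; the type_info builtin and the example types Vec3 and Other are illustrative assumptions, not part of this commit.

	// Hypothetical sketch: assumes `type_info(T)` is the builtin yielding a ^Type_Info.
	Vec3  :: struct { x, y, z: f32 }
	Other :: struct { x, y, z: f32 }

	check_identity :: proc() {
		a := type_info(Vec3);
		b := type_info(Vec3);
		c := type_info(Other);

		assert(are_types_identical(a, b));  // same named type: the pointers already match
		// Same layout but a distinct named type: Type_Info_Named compares base pointers,
		// so this is expected to be false.
		assert(!are_types_identical(a, c));
	}
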
+ 51 - 48
src/checker.cpp

@@ -249,6 +249,7 @@ void scope_reset(Scope *scope) {
 	map_clear  (&scope->implicit);
 	array_clear(&scope->shared);
 	ptr_set_clear(&scope->imported);
+	ptr_set_clear(&scope->exported);
 }
 
 i32 is_scope_an_ancestor(Scope *parent, Scope *child) {
@@ -625,6 +626,7 @@ void destroy_scope(Scope *scope) {
 	array_free(&scope->shared);
 	array_free(&scope->delayed_file_decls);
 	ptr_set_destroy(&scope->imported);
+	ptr_set_destroy(&scope->exported);
 
 	// NOTE(bill): No need to free scope as it "should" be allocated in an arena (except for the global scope)
 }
@@ -2290,6 +2292,7 @@ void add_import_dependency_node(Checker *c, AstNode *decl, Map<ImportGraphNode *
 		Scope *scope = *found;
 		GB_ASSERT(scope != nullptr);
 
+		id->file = scope->file;
 
 		ImportGraphNode **found_node = nullptr;
 		ImportGraphNode *m = nullptr;
@@ -2328,6 +2331,7 @@ void add_import_dependency_node(Checker *c, AstNode *decl, Map<ImportGraphNode *
 		}
 		Scope *scope = *found;
 		GB_ASSERT(scope != nullptr);
+		ed->file = scope->file;
 
 		ImportGraphNode **found_node = nullptr;
 		ImportGraphNode *m = nullptr;
@@ -2413,55 +2417,52 @@ Array<ImportGraphNode *> generate_import_dependency_graph(Checker *c) {
 	return G;
 }
 
-Array<Scope *> find_import_path(Map<Scope *> *file_scopes, Scope *start, Scope *end, PtrSet<Scope *> *visited = nullptr) {
-	PtrSet<Scope *> visited_ = {};
-	bool made_visited = false;
-	if (visited == nullptr) {
-		made_visited = true;
-		ptr_set_init(&visited_, heap_allocator());
-		visited = &visited_;
-	}
-	defer (if (made_visited) {
-		ptr_set_destroy(&visited_);
-	});
+struct ImportPathItem {
+	Scope *  scope;
+	AstNode *decl;
+};
 
-	Array<Scope *> empty_path = {};
+Array<ImportPathItem> find_import_path(Checker *c, Scope *start, Scope *end, PtrSet<Scope *> *visited) {
+	Array<ImportPathItem> empty_path = {};
 
 	if (ptr_set_exists(visited, start)) {
 		return empty_path;
 	}
 	ptr_set_add(visited, start);
 
+
 	String path = start->file->tokenizer.fullpath;
 	HashKey key = hash_string(path);
-	Scope **found = map_get(file_scopes, key);
+	Scope **found = map_get(&c->file_scopes, key);
 	if (found) {
-		Scope *scope = *found;
-		for_array(i, scope->exported.entries) {
-			Scope *dep = scope->exported.entries[i].ptr;
-			if (dep == end) {
-				Array<Scope *> path = {};
+		AstFile *f = (*found)->file;
+		GB_ASSERT(f != nullptr);
+
+		for_array(i, f->imports_and_exports) {
+			Scope *s = nullptr;
+			AstNode *decl = f->imports_and_exports[i];
+			if (decl->kind == AstNode_ExportDecl) {
+				s = decl->ExportDecl.file->scope;
+			} else if (decl->kind == AstNode_ImportDecl && decl->ImportDecl.is_using) {
+				s = decl->ImportDecl.file->scope;
+			} else {
+				continue;
+			}
+			GB_ASSERT(s != nullptr);
+
+			ImportPathItem item = {s, decl};
+			if (s == end) {
+				Array<ImportPathItem> path = {};
 				array_init(&path, heap_allocator());
-				array_add(&path, dep);
+				array_add(&path, item);
 				return path;
 			}
-			Array<Scope *> next_path = find_import_path(file_scopes, dep, end, visited);
+			Array<ImportPathItem> next_path = find_import_path(c, s, end, visited);
 			if (next_path.count > 0) {
-				array_add(&next_path, dep);
+				array_add(&next_path, item);
 				return next_path;
 			}
 		}
-
-		// NOTE(bill): Disallow importing of the same file
-		for_array(i, scope->imported.entries) {
-			Scope *dep = scope->imported.entries[i].ptr;
-			if (dep == start) {
-				Array<Scope *> path = {};
-				array_init(&path, heap_allocator());
-				array_add(&path, dep);
-				return path;
-			}
-		}
 	}
 	return empty_path;
 }
@@ -2839,31 +2840,33 @@ void check_import_entities(Checker *c) {
 		Scope *s = n->scope;
 
 		if (n->dep_count > 0) {
-			auto path = find_import_path(&c->file_scopes, s, s);
+			PtrSet<Scope *> visited = {};
+			ptr_set_init(&visited, heap_allocator());
+			defer (ptr_set_destroy(&visited));
+
+			auto path = find_import_path(c, s, s, &visited);
 			defer (array_free(&path));
 
 			// TODO(bill): This needs better TokenPos finding
-			auto const mt = [](Scope *s) -> Token {
-				Token token = {};
-				token.pos = token_pos(s->file->tokenizer.fullpath, 1, 1);
-				token.string = remove_directory_from_path(s->file->tokenizer.fullpath);
-				return token;
+			auto const fn = [](ImportPathItem item) -> String {
+				Scope *s = item.scope;
+				return remove_directory_from_path(s->file->tokenizer.fullpath);
 			};
 
 			if (path.count == 1) {
-				Scope *s = path[0];
-				Token token = mt(s);
-				error(token, "Self importation of `%.*s`", LIT(token.string));
+				ImportPathItem item = path[0];
+				String filename = fn(item);
+				error(item.decl, "Self importation of `%.*s`", LIT(filename));
 			} else if (path.count > 0) {
-				Scope *s = path[path.count-1];
-				Token token = mt(s);
-				error(token, "Cyclic importation of `%.*s`", LIT(token.string));
+				ImportPathItem item = path[path.count-1];
+				String filename = fn(item);
+				error(item.decl, "Cyclic importation of `%.*s`", LIT(filename));
 				for (isize i = 0; i < path.count; i++) {
-					gb_printf_err("\t`%.*s` refers to\n", LIT(token.string));
-					s = path[i];
-					token = mt(s);
+					error(item.decl, "`%.*s` refers to", LIT(filename));
+					item = path[i];
+					filename = fn(item);
 				}
-				gb_printf_err("\t`%.*s`\n", LIT(token.string));
+				error(item.decl, "`%.*s`", LIT(filename));
 			}
 
 		}

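Note: find_import_path now returns ImportPathItem values (the scope plus the declaring AST node), so the "Self importation" and "Cyclic importation ... refers to" diagnostics can point at the offending import or export declaration rather than at a synthesized token at position 1:1 of each file, and only `export` and `using import` declarations are followed as edges. A hedged sketch of the kind of cycle the walk reports; the file names and exact path syntax are illustrative assumptions:

	// a.odin (hypothetical)
	using import "b.odin";   // `using import` is an edge the walk follows; this
	                         // declaration node supplies the reported source position

	// b.odin (hypothetical)
	export "a.odin";         // `export` is also followed, closing the cycle
	                         // a.odin -> b.odin -> a.odin that the checker reports
	                         // as a cyclic importation chain
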
+ 16 - 7
src/parser.cpp

@@ -57,6 +57,8 @@ struct AstFile {
 	Array<AstNode *>    decls;
 	ImportedFileKind    file_kind;
 	bool                is_global_scope;
+	Array<AstNode *>    imports_and_exports; // `import` `using import` `export`
+
 
 	AstNode *           curr_proc;
 	isize               scope_level;
@@ -351,6 +353,7 @@ AST_NODE_KIND(_DeclBegin,      "", i32) \
 		CommentGroup     comment;      \
 	}) \
 	AST_NODE_KIND(ImportDecl, "import declaration", struct { \
+		AstFile *file;          \
 		Token    token;         \
 		Token    relpath;       \
 		String   fullpath;      \
@@ -361,6 +364,7 @@ AST_NODE_KIND(_DeclBegin,      "", i32) \
 		CommentGroup comment;   \
 	}) \
 	AST_NODE_KIND(ExportDecl, "export declaration", struct { \
+		AstFile *file;          \
 		Token    token;         \
 		Token    relpath;       \
 		String   fullpath;      \
@@ -1265,7 +1269,7 @@ AstNode *ast_range_stmt(AstFile *f, Token token, AstNode *value, AstNode *index,
 	return result;
 }
 
-AstNode *ast_match_stmt(AstFile *f, Token token, AstNode *init, AstNode *tag, AstNode *body) {
+AstNode *ast_switch_stmt(AstFile *f, Token token, AstNode *init, AstNode *tag, AstNode *body) {
 	AstNode *result = make_ast_node(f, AstNode_SwitchStmt);
 	result->SwitchStmt.token = token;
 	result->SwitchStmt.init  = init;
@@ -1275,7 +1279,7 @@ AstNode *ast_match_stmt(AstFile *f, Token token, AstNode *init, AstNode *tag, As
 }
 
 
-AstNode *ast_type_match_stmt(AstFile *f, Token token, AstNode *tag, AstNode *body) {
+AstNode *ast_type_switch_stmt(AstFile *f, Token token, AstNode *tag, AstNode *body) {
 	AstNode *result = make_ast_node(f, AstNode_TypeSwitchStmt);
 	result->TypeSwitchStmt.token = token;
 	result->TypeSwitchStmt.tag   = tag;
@@ -4240,7 +4244,7 @@ AstNode *parse_case_clause(AstFile *f, bool is_type) {
 }
 
 
-AstNode *parse_match_stmt(AstFile *f) {
+AstNode *parse_switch_stmt(AstFile *f) {
 	if (f->curr_proc == nullptr) {
 		syntax_error(f->curr_token, "You cannot use a match statement in the file scope");
 		return ast_bad_stmt(f, f->curr_token, f->curr_token);
@@ -4293,9 +4297,9 @@ AstNode *parse_match_stmt(AstFile *f) {
 
 	if (!is_type_match) {
 		tag = convert_stmt_to_expr(f, tag, str_lit("match expression"));
-		return ast_match_stmt(f, token, init, tag, body);
+		return ast_switch_stmt(f, token, init, tag, body);
 	} else {
-		return ast_type_match_stmt(f, token, tag, body);
+		return ast_type_switch_stmt(f, token, tag, body);
 	}
 }
 
@@ -4380,6 +4384,7 @@ AstNode *parse_import_decl(AstFile *f, bool is_using) {
 		s = ast_bad_decl(f, import_name, file_path);
 	} else {
 		s = ast_import_decl(f, token, is_using, file_path, import_name, cond, docs, f->line_comment);
+		array_add(&f->imports_and_exports, s);
 	}
 	expect_semicolon(f, s);
 	return s;
@@ -4401,6 +4406,7 @@ AstNode *parse_export_decl(AstFile *f) {
 		s = ast_bad_decl(f, token, file_path);
 	} else {
 		s = ast_export_decl(f, token, file_path, cond, docs, f->line_comment);
+		array_add(&f->imports_and_exports, s);
 	}
 	expect_semicolon(f, s);
 	return s;
@@ -4495,10 +4501,10 @@ AstNode *parse_stmt(AstFile *f) {
 	case Token_if:     return parse_if_stmt(f);
 	case Token_when:   return parse_when_stmt(f);
 	case Token_for:    return parse_for_stmt(f);
-	case Token_switch:  return parse_match_stmt(f);
+	case Token_switch: return parse_switch_stmt(f);
 	case Token_defer:  return parse_defer_stmt(f);
-	case Token_asm:    return parse_asm_stmt(f);
 	case Token_return: return parse_return_stmt(f);
+	case Token_asm:    return parse_asm_stmt(f);
 
 	case Token_break:
 	case Token_continue:
@@ -4726,6 +4732,7 @@ ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
 	arena_size *= 2*f->tokens.count;
 	gb_arena_init_from_allocator(&f->arena, heap_allocator(), arena_size);
 	array_init(&f->comments, heap_allocator());
+	array_init(&f->imports_and_exports, heap_allocator());
 
 	f->curr_proc = nullptr;
 
@@ -4735,6 +4742,8 @@ ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
 void destroy_ast_file(AstFile *f) {
 	gb_arena_free(&f->arena);
 	array_free(&f->tokens);
+	array_free(&f->comments);
+	array_free(&f->imports_and_exports);
 	gb_free(heap_allocator(), f->tokenizer.fullpath.text);
 	destroy_tokenizer(&f->tokenizer);
 }

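Note: AstFile now carries an imports_and_exports array, filled in parse_import_decl and parse_export_decl and iterated by the checker's cycle walk above. Per the comment on the field, three declaration forms land in it; a hedged sketch with hypothetical paths (alias forms omitted):

	import       "fmt.odin";   // recorded, but only treated as a cycle edge when `using`
	using import "mem.odin";   // recorded and followed by find_import_path
	export       "api.odin";   // recorded and followed by find_import_path
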
+ 18 - 0
src/types.cpp

@@ -1120,6 +1120,24 @@ bool are_types_identical(Type *x, Type *y) {
 		}
 		break;
 
+	case Type_BitField:
+		if (y->kind == Type_BitField) {
+			if (x->BitField.field_count == y->BitField.field_count &&
+			    x->BitField.custom_align == y->BitField.custom_align) {
+				for (i32 i = 0; i < x->BitField.field_count; i++) {
+					if (x->BitField.offsets[i] != y->BitField.offsets[i]) {
+						return false;
+					}
+					if (x->BitField.sizes[i] != y->BitField.sizes[i]) {
+						return false;
+					}
+				}
+
+				return true;
+			}
+		}
+		break;
+
 
 	case Type_Enum:
 		return x == y; // NOTE(bill): All enums are unique
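
Note: this compiler-side Type_BitField case mirrors the Type_Info_Bit_Field case added in core/types.odin: two bit fields compare identical only when field count, custom alignment, and every per-field offset and size agree, while enums stay strictly nominal. A hedged sketch, assuming this era's bit_field and enum syntax:

	// The new case compares field count, per-field sizes and offsets, and custom
	// alignment; these two layouts agree on all of them:
	Flags_A :: bit_field { x: 3, y: 5, z: 8 }
	Flags_B :: bit_field { x: 3, y: 5, z: 8 }
	// Different field count and sizes, so it can never compare identical to the above:
	Flags_C :: bit_field { x: 3, y: 13 }

	// Enums remain unique types compared by pointer (`x == y`), so two structurally
	// equal enum declarations are never identical:
	Color_A :: enum { Red, Green, Blue }
	Color_B :: enum { Red, Green, Blue }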