package odin_html_docs

import doc "core:odin/doc-format"
import "core:fmt"
import "core:io"
import "core:os"
import "core:strings"
import "core:path/slashpath"
import "core:sort"
import "core:slice"
import "core:time"

GITHUB_LICENSE_URL :: "https://github.com/odin-lang/Odin/tree/master/LICENSE"
GITHUB_CORE_URL    :: "https://github.com/odin-lang/Odin/tree/master/core"
GITHUB_VENDOR_URL  :: "https://github.com/odin-lang/Odin/tree/master/vendor"
BASE_CORE_URL   :: "/core"
BASE_VENDOR_URL :: "/vendor"

header:   ^doc.Header
files:    []doc.File
pkgs:     []doc.Pkg
entities: []doc.Entity
types:    []doc.Type

core_pkgs_to_use:   map[string]^doc.Pkg // trimmed path
vendor_pkgs_to_use: map[string]^doc.Pkg // trimmed path
pkg_to_path:        map[^doc.Pkg]string // trimmed path
pkg_to_collection:  map[^doc.Pkg]^Collection

Collection :: struct {
	name:        string,
	pkgs_to_use: ^map[string]^doc.Pkg,
	github_url:  string,
	base_url:    string,
	root:        ^Dir_Node,
}

array :: proc(a: $A/doc.Array($T)) -> []T {
	return doc.from_array(header, a)
}

str :: proc(s: $A/doc.String) -> string {
	return doc.from_string(header, s)
}

// Prints an error message prefixed with the program name and exits.
errorf :: proc(format: string, args: ..any) -> ! {
	fmt.eprintf("%s ", os.args[0])
	fmt.eprintf(format, ..args)
	fmt.eprintln()
	os.exit(1)
}

// Follows .Named types until the underlying base type is reached.
base_type :: proc(t: doc.Type) -> doc.Type {
	t := t
	for {
		if t.kind != .Named {
			break
		}
		t = types[array(t.types)[0]]
	}
	return t
}

is_type_untyped :: proc(type: doc.Type) -> bool {
	if type.kind == .Basic {
		flags := transmute(doc.Type_Flags_Basic)type.flags
		return .Untyped in flags
	}
	return false
}

// Longest prefix shared by every string in strs.
common_prefix :: proc(strs: []string) -> string {
	if len(strs) == 0 {
		return ""
	}
	n := max(int)
	for str in strs {
		n = min(n, len(str))
	}

	prefix := strs[0][:n]
	for str in strs[1:] {
		for len(prefix) != 0 && str[:len(prefix)] != prefix {
			prefix = prefix[:len(prefix)-1]
		}
		if len(prefix) == 0 {
			break
		}
	}
	return prefix
}

// Creates each directory component of path, one level at a time, under prefix.
recursive_make_directory :: proc(path: string, prefix := "") {
	head, _, tail := strings.partition(path, "/")
	path_to_make := head
	if prefix != "" {
		path_to_make = fmt.tprintf("%s/%s", prefix, head)
	}
	os.make_directory(path_to_make, 0)
	if tail != "" {
		recursive_make_directory(tail, path_to_make)
	}
}

write_html_header :: proc(w: io.Writer, title: string) {
	fmt.wprintf(w, string(#load("header.txt.html")), title)
	io.write(w, #load("header-lower.txt.html"))
}

write_html_footer :: proc(w: io.Writer, include_directory_js: bool) {
	fmt.wprintf(w, "\n")
	io.write(w, #load("footer.txt.html"))
	if false && include_directory_js {
		io.write_string(w, ` `)
	}
	fmt.wprintf(w, "\n\n")
}

main :: proc() {
	if len(os.args) != 2 {
		errorf("expected 1 .odin-doc file")
	}
	data, ok := os.read_entire_file(os.args[1])
	if !ok {
		errorf("unable to read file: %s", os.args[1])
	}

	err: doc.Reader_Error
	header, err = doc.read_from_bytes(data)
	switch err {
	case .None:
	case .Header_Too_Small:
		errorf("file is too small for the file format")
	case .Invalid_Magic:
		errorf("invalid magic for the file format")
	case .Data_Too_Small:
		errorf("data is too small for the file format")
	case .Invalid_Version:
		errorf("invalid file format version")
	}

	files    = array(header.files)
	pkgs     = array(header.pkgs)
	entities = array(header.entities)
	types    = array(header.types)

	core_collection := &Collection{
		"Core",
		&core_pkgs_to_use,
		GITHUB_CORE_URL,
		BASE_CORE_URL,
		nil,
	}
	vendor_collection := &Collection{
		"Vendor",
		&vendor_pkgs_to_use,
		GITHUB_VENDOR_URL,
		BASE_VENDOR_URL,
		nil,
	}

	{
		fullpaths: [dynamic]string
		defer delete(fullpaths)

		for pkg in pkgs[1:] {
			append(&fullpaths, str(pkg.fullpath))
		}
		path_prefix := common_prefix(fullpaths[:])

		core_pkgs_to_use = make(map[string]^doc.Pkg)
		vendor_pkgs_to_use = make(map[string]^doc.Pkg)
		fullpath_loop: for fullpath, i in fullpaths {
			path := strings.trim_prefix(fullpath, path_prefix)
			pkg := &pkgs[i+1]
			if len(array(pkg.entities)) == 0 {
				continue fullpath_loop
			}
			switch {
			case strings.has_prefix(path, "core/"):
				trimmed_path := strings.trim_prefix(path, "core/")
				if strings.has_prefix(trimmed_path, "sys") {
					continue fullpath_loop
				}
				core_pkgs_to_use[trimmed_path] = pkg
			case strings.has_prefix(path, "vendor/"):
				trimmed_path := strings.trim_prefix(path, "vendor/")
				if strings.contains(trimmed_path, "/bindings") {
					continue fullpath_loop
				}
				vendor_pkgs_to_use[trimmed_path] = pkg
			}
		}
		for path, pkg in core_pkgs_to_use {
			pkg_to_path[pkg] = path
			pkg_to_collection[pkg] = core_collection
		}
		for path, pkg in vendor_pkgs_to_use {
			pkg_to_path[pkg] = path
			pkg_to_collection[pkg] = vendor_collection
		}
	}

	b := strings.make_builder()
	defer strings.destroy_builder(&b)
	w := strings.to_writer(&b)

	{
		strings.reset_builder(&b)
		write_html_header(w, "Packages - pkg.odin-lang.org")
		write_home_page(w)
		write_html_footer(w, true)
		os.write_entire_file("index.html", b.buf[:])
	}

	core_collection.root = generate_directory_tree(core_pkgs_to_use)
	vendor_collection.root = generate_directory_tree(vendor_pkgs_to_use)

	generate_packages(&b, core_collection, "core")
	generate_packages(&b, vendor_collection, "vendor")
}

// Writes the collection index page and one page per package in the collection.
generate_packages :: proc(b: ^strings.Builder, collection: ^Collection, dir: string) {
	w := strings.to_writer(b)

	{
		strings.reset_builder(b)
		write_html_header(w, fmt.tprintf("%s library - pkg.odin-lang.org", dir))
		write_collection_directory(w, collection)
		write_html_footer(w, true)
		os.make_directory(dir, 0)
		os.write_entire_file(fmt.tprintf("%s/index.html", dir), b.buf[:])
	}

	for path, pkg in collection.pkgs_to_use {
		strings.reset_builder(b)
		write_html_header(w, fmt.tprintf("package %s - pkg.odin-lang.org", path))
		write_pkg(w, path, pkg, collection)
		write_html_footer(w, false)
		recursive_make_directory(path, dir)
		os.write_entire_file(fmt.tprintf("%s/%s/index.html", dir, path), b.buf[:])
	}
}

write_home_sidebar :: proc(w: io.Writer) {
	fmt.wprintln(w, ``)
	fmt.wprintln(w, `
`) defer fmt.wprintln(w, `
`) fmt.wprintln(w, ``) } write_home_page :: proc(w: io.Writer) { fmt.wprintln(w, `
`) defer fmt.wprintln(w, `
`) write_home_sidebar(w) fmt.wprintln(w, `
`) defer fmt.wprintln(w, `
`) fmt.wprintln(w, "
") fmt.wprintln(w, "

Odin Packages

") fmt.wprintln(w, "
") fmt.wprintln(w, "
") defer fmt.wprintln(w, "
") fmt.wprintln(w, `
`) fmt.wprintln(w, `

Core Library Collection

`) fmt.wprintln(w, `

Documentation for all the packages part of the core library collection.

`) fmt.wprintln(w, `
`) fmt.wprintln(w, `
`) fmt.wprintln(w, `

Vendor Library Collection

`) fmt.wprintln(w, `

Documentation for all the packages part of the vendor library collection.

`) fmt.wprintln(w, `
`)
}

Dir_Node :: struct {
	dir:      string,
	path:     string,
	name:     string,
	pkg:      ^doc.Pkg,
	children: [dynamic]^Dir_Node,
}

// Builds a two-level directory tree from the trimmed package paths,
// grouping "dir/inner" packages under a parent node per directory.
generate_directory_tree :: proc(pkgs_to_use: map[string]^doc.Pkg) -> (root: ^Dir_Node) {
	sort_tree :: proc(node: ^Dir_Node) {
		slice.sort_by_key(node.children[:], proc(node: ^Dir_Node) -> string {return node.name})
		for child in node.children {
			sort_tree(child)
		}
	}

	root = new(Dir_Node)
	root.children = make([dynamic]^Dir_Node)
	children := make([dynamic]^Dir_Node)
	for path, pkg in pkgs_to_use {
		dir, _, inner := strings.partition(path, "/")
		if inner == "" {
			node := new_clone(Dir_Node{
				dir  = dir,
				name = dir,
				path = path,
				pkg  = pkg,
			})
			append(&root.children, node)
		} else {
			node := new_clone(Dir_Node{
				dir  = dir,
				name = inner,
				path = path,
				pkg  = pkg,
			})
			append(&children, node)
		}
	}

	child_loop: for child in children {
		dir, _, _ := strings.partition(child.path, "/")
		for node in root.children {
			if node.dir == dir {
				append(&node.children, child)
				continue child_loop
			}
		}
		parent := new_clone(Dir_Node{
			dir  = dir,
			name = dir,
			path = dir,
			pkg  = nil,
		})
		append(&root.children, parent)
		append(&parent.children, child)
	}

	sort_tree(root)
	return
}

write_collection_directory :: proc(w: io.Writer, collection: ^Collection) {
	// First line of the package docs, used as a short description; lines that
	// look like license or copyright boilerplate are skipped.
	get_line_doc :: proc(pkg: ^doc.Pkg) -> (line_doc: string, ok: bool) {
		if pkg == nil {
			return
		}
		line_doc, _, _ = strings.partition(str(pkg.docs), "\n")
		line_doc = strings.trim_space(line_doc)
		if line_doc == "" {
			return
		}
		switch {
		case strings.has_prefix(line_doc, "*"):
			return "", false
		case strings.has_prefix(line_doc, "Copyright"):
			return "", false
		}
		return line_doc, true
	}

	fmt.wprintln(w, `
`) defer fmt.wprintln(w, `
`) write_home_sidebar(w) fmt.wprintln(w, `
`) defer fmt.wprintln(w, `
`) { fmt.wprintln(w, `
`) fmt.wprintln(w, `
`) fmt.wprintf(w, "

%s Library Collection

\n", collection.name) fmt.wprintln(w, "") fmt.wprintln(w, "
") fmt.wprintln(w, "
") fmt.wprintln(w, `
`) } fmt.wprintln(w, "
") fmt.wprintln(w, `

Directories

`) fmt.wprintln(w, "
") fmt.wprintln(w, "
") fmt.wprintln(w, "\t") fmt.wprintln(w, "\t\t") for dir in collection.root.children { if len(dir.children) != 0 { fmt.wprint(w, ``) io.write_string(w, ``) fmt.wprintf(w, "\n") for child in dir.children { assert(child.pkg != nil) fmt.wprintf(w, ``) line_doc, _, _ := strings.partition(str(child.pkg.docs), "\n") line_doc = strings.trim_space(line_doc) io.write_string(w, ``) fmt.wprintf(w, "") fmt.wprintf(w, "\n") } } fmt.wprintln(w, "\t\t") fmt.wprintln(w, "\t
`, dir.dir) } else { fmt.wprintf(w, `
`, dir.dir) } if dir.pkg != nil { fmt.wprintf(w, `%s`, collection.base_url, dir.path, dir.name) } else { fmt.wprintf(w, "%s", dir.name) } io.write_string(w, ``) if line_doc, ok := get_line_doc(dir.pkg); ok { write_doc_line(w, line_doc) } else { io.write_string(w, ` `) } io.write_string(w, `
`, str(child.pkg.name)) fmt.wprintf(w, `%s`, collection.base_url, child.path, child.name) io.write_string(w, ``) if line_doc, ok := get_line_doc(child.pkg); ok { write_doc_line(w, line_doc) } else { io.write_string(w, ` `) } io.write_string(w, `
") fmt.wprintln(w, "
") } is_entity_blank :: proc(e: doc.Entity_Index) -> bool { name := str(entities[e].name) return name == "" } write_where_clauses :: proc(w: io.Writer, where_clauses: []doc.String) { if len(where_clauses) != 0 { io.write_string(w, " where ") for clause, i in where_clauses { if i > 0 { io.write_string(w, ", ") } io.write_string(w, str(clause)) } } } Write_Type_Flag :: enum { Is_Results, Variadic, Allow_Indent, Poly_Names, } Write_Type_Flags :: distinct bit_set[Write_Type_Flag] Type_Writer :: struct { w: io.Writer, pkg: doc.Pkg_Index, indent: int, generic_scope: map[string]bool, } write_type :: proc(using writer: ^Type_Writer, type: doc.Type, flags: Write_Type_Flags) { write_param_entity :: proc(using writer: ^Type_Writer, e, next_entity: ^doc.Entity, flags: Write_Type_Flags, name_width := 0) { name := str(e.name) write_padding :: proc(w: io.Writer, name: string, name_width: int) { for _ in 0..`, BASE_CORE_URL) io.write_string(w, init_string) io.write_string(w, ``) case strings.has_prefix(init_string, "context."): io.write_string(w, name) io.write_string(w, " := ") fmt.wprintf(w, ``, BASE_CORE_URL) io.write_string(w, init_string) io.write_string(w, ``) case: the_type := types[e.type] type_flags := flags - {.Is_Results} if .Param_Ellipsis in e.flags { type_flags += {.Variadic} } #partial switch e.kind { case .Constant: assert(name != "") io.write_byte(w, '$') io.write_string(w, name) if name != "" && init_string == "" && next_entity != nil && e.field_group_index >= 0 { if e.field_group_index == next_entity.field_group_index && e.type == next_entity.type { return } } generic_scope[name] = true if !is_type_untyped(the_type) { io.write_string(w, ": ") write_padding(w, name, name_width) write_type(writer, the_type, type_flags) io.write_string(w, " = ") io.write_string(w, init_string) } else { io.write_string(w, " := ") io.write_string(w, init_string) } return case .Variable: if name != "" && init_string == "" && next_entity != nil && e.field_group_index >= 0 { if e.field_group_index == next_entity.field_group_index && e.type == next_entity.type { io.write_string(w, name) return } } if name != "" { io.write_string(w, name) io.write_string(w, ": ") write_padding(w, name, name_width) } write_type(writer, the_type, type_flags) case .Type_Name: io.write_byte(w, '$') io.write_string(w, name) generic_scope[name] = true io.write_string(w, ": ") write_padding(w, name, name_width) if the_type.kind == .Generic { io.write_string(w, "typeid") if ts := array(the_type.types); len(ts) == 1 { io.write_byte(w, '/') write_type(writer, types[ts[0]], type_flags) } } else { write_type(writer, the_type, type_flags) } } if init_string != "" { io.write_string(w, " = ") io.write_string(w, init_string) } } } write_poly_params :: proc(using writer: ^Type_Writer, type: doc.Type, flags: Write_Type_Flags) { if type.polymorphic_params != 0 { io.write_byte(w, '(') write_type(writer, types[type.polymorphic_params], flags+{.Poly_Names}) io.write_byte(w, ')') } write_where_clauses(w, array(type.where_clauses)) } do_indent :: proc(using writer: ^Type_Writer, flags: Write_Type_Flags) { if .Allow_Indent not_in flags { return } for _ in 0.. 
(name_width: int) { for entity_index in type_entities { e := &entities[entity_index] name := str(e.name) name_width = max(len(name), name_width) } return } type_entities := array(type.entities) type_types := array(type.types) switch type.kind { case .Invalid: // ignore case .Basic: type_flags := transmute(doc.Type_Flags_Basic)type.flags if is_type_untyped(type) { io.write_string(w, str(type.name)) } else { fmt.wprintf(w, `%s`, str(type.name)) } case .Named: e := entities[type_entities[0]] name := str(type.name) tn_pkg := files[e.pos.file].pkg collection: Collection // TODO determine this from package if tn_pkg != pkg { fmt.wprintf(w, `%s.`, str(pkgs[tn_pkg].name)) } if .Private in e.flags { io.write_string(w, name) } else if n := strings.contains_rune(name, '('); n >= 0 { fmt.wprintf(w, `{1:s}`, pkg_to_path[&pkgs[tn_pkg]], name[:n], collection.base_url) io.write_string(w, name[n:]) } else { fmt.wprintf(w, `{1:s}`, pkg_to_path[&pkgs[tn_pkg]], name, collection.base_url) } case .Generic: name := str(type.name) if name not_in generic_scope { io.write_byte(w, '$') } io.write_string(w, name) if name not_in generic_scope && len(array(type.types)) == 1 { io.write_byte(w, '/') write_type(writer, types[type_types[0]], flags) } case .Pointer: io.write_byte(w, '^') write_type(writer, types[type_types[0]], flags) case .Array: assert(type.elem_count_len == 1) io.write_byte(w, '[') io.write_uint(w, uint(type.elem_counts[0])) io.write_byte(w, ']') write_type(writer, types[type_types[0]], flags) case .Enumerated_Array: io.write_byte(w, '[') write_type(writer, types[type_types[0]], flags) io.write_byte(w, ']') write_type(writer, types[type_types[1]], flags) case .Slice: if .Variadic in flags { io.write_string(w, "..") } else { io.write_string(w, "[]") } write_type(writer, types[type_types[0]], flags - {.Variadic}) case .Dynamic_Array: io.write_string(w, "[dynamic]") write_type(writer, types[type_types[0]], flags) case .Map: io.write_string(w, "map[") write_type(writer, types[type_types[0]], flags) io.write_byte(w, ']') write_type(writer, types[type_types[1]], flags) case .Struct: type_flags := transmute(doc.Type_Flags_Struct)type.flags io.write_string(w, "struct") write_poly_params(writer, type, flags) if .Packed in type_flags { io.write_string(w, " #packed") } if .Raw_Union in type_flags { io.write_string(w, " #raw_union") } if custom_align := str(type.custom_align); custom_align != "" { io.write_string(w, " #align") io.write_string(w, custom_align) } io.write_string(w, " {") tags := array(type.tags) if len(type_entities) != 0 { do_newline(writer, flags) indent += 1 name_width := calc_name_width(type_entities) for entity_index, i in type_entities { e := &entities[entity_index] next_entity: ^doc.Entity = nil if i+1 < len(type_entities) { next_entity = &entities[type_entities[i+1]] } do_indent(writer, flags) write_param_entity(writer, e, next_entity, flags, name_width) if tag := str(tags[i]); tag != "" { io.write_byte(w, ' ') io.write_quoted_string(w, tag) } io.write_byte(w, ',') do_newline(writer, flags) } indent -= 1 do_indent(writer, flags) } io.write_string(w, "}") case .Union: type_flags := transmute(doc.Type_Flags_Union)type.flags io.write_string(w, "union") write_poly_params(writer, type, flags) if .No_Nil in type_flags { io.write_string(w, " #no_nil") } if .Maybe in type_flags { io.write_string(w, " #maybe") } if custom_align := str(type.custom_align); custom_align != "" { io.write_string(w, " #align") io.write_string(w, custom_align) } io.write_string(w, " {") if len(type_types) > 1 { 
do_newline(writer, flags) indent += 1 for type_index in type_types { do_indent(writer, flags) write_type(writer, types[type_index], flags) io.write_string(w, ", ") do_newline(writer, flags) } indent -= 1 do_indent(writer, flags) } io.write_string(w, "}") case .Enum: io.write_string(w, "enum") if len(type_types) != 0 { io.write_byte(w, ' ') write_type(writer, types[type_types[0]], flags) } io.write_string(w, " {") do_newline(writer, flags) indent += 1 name_width := calc_name_width(type_entities) for entity_index in type_entities { e := &entities[entity_index] name := str(e.name) do_indent(writer, flags) io.write_string(w, name) if init_string := str(e.init_string); init_string != "" { for _ in 0.. 1 || !is_entity_blank(type_entities[0])) if require_parens { io.write_byte(w, '(') } for entity_index, i in type_entities { if i > 0 { io.write_string(w, ", ") } e := &entities[entity_index] next_entity: ^doc.Entity = nil if i+1 < len(type_entities) { next_entity = &entities[type_entities[i+1]] } write_param_entity(writer, e, next_entity, flags) } if require_parens { io.write_byte(w, ')') } case .Proc: type_flags := transmute(doc.Type_Flags_Proc)type.flags io.write_string(w, "proc") cc := str(type.calling_convention) if cc != "" { io.write_byte(w, ' ') io.write_quoted_string(w, cc) io.write_byte(w, ' ') } params := array(type.types)[0] results := array(type.types)[1] io.write_byte(w, '(') write_type(writer, types[params], flags) io.write_byte(w, ')') if results != 0 { assert(.Diverging not_in type_flags) io.write_string(w, " -> ") write_type(writer, types[results], flags+{.Is_Results}) } if .Diverging in type_flags { io.write_string(w, " -> !") } if .Optional_Ok in type_flags { io.write_string(w, " #optional_ok") } case .Bit_Set: type_flags := transmute(doc.Type_Flags_Bit_Set)type.flags io.write_string(w, "bit_set[") if .Op_Lt in type_flags { io.write_uint(w, uint(type.elem_counts[0])) io.write_string(w, "..<") io.write_uint(w, uint(type.elem_counts[1])) } else if .Op_Lt_Eq in type_flags { io.write_uint(w, uint(type.elem_counts[0])) io.write_string(w, "..=") io.write_uint(w, uint(type.elem_counts[1])) } else { write_type(writer, types[type_types[0]], flags) } if .Underlying_Type in type_flags { write_type(writer, types[type_types[1]], flags) } io.write_string(w, "]") case .Simd_Vector: io.write_string(w, "#simd[") io.write_uint(w, uint(type.elem_counts[0])) io.write_byte(w, ']') case .SOA_Struct_Fixed: io.write_string(w, "#soa[") io.write_uint(w, uint(type.elem_counts[0])) io.write_byte(w, ']') case .SOA_Struct_Slice: io.write_string(w, "#soa[]") case .SOA_Struct_Dynamic: io.write_string(w, "#soa[dynamic]") case .Relative_Pointer: io.write_string(w, "#relative(") write_type(writer, types[type_types[1]], flags) io.write_string(w, ") ") write_type(writer, types[type_types[0]], flags) case .Relative_Slice: io.write_string(w, "#relative(") write_type(writer, types[type_types[1]], flags) io.write_string(w, ") ") write_type(writer, types[type_types[0]], flags) case .Multi_Pointer: io.write_string(w, "[^]") write_type(writer, types[type_types[0]], flags) case .Matrix: io.write_string(w, "matrix[") io.write_uint(w, uint(type.elem_counts[0])) io.write_string(w, ", ") io.write_uint(w, uint(type.elem_counts[1])) io.write_string(w, "]") write_type(writer, types[type_types[0]], flags) } } write_doc_line :: proc(w: io.Writer, text: string) { text := text for len(text) != 0 { if strings.count(text, "`") >= 2 { n := strings.index_byte(text, '`') io.write_string(w, text[:n]) io.write_string(w, "") remaining := 
text[n+1:]
			m := strings.index_byte(remaining, '`')
			io.write_string(w, remaining[:m])
			io.write_string(w, "")
			text = remaining[m+1:]
		} else {
			io.write_string(w, text)
			return
		}
	}
}

write_doc_sidebar :: proc(w: io.Writer) {
}

write_docs :: proc(w: io.Writer, pkg: ^doc.Pkg, docs: string) {
	if docs == "" {
		return
	}

	Block_Kind :: enum {
		Paragraph,
		Code,
	}
	Block :: struct {
		kind:  Block_Kind,
		lines: []string,
	}

	lines: [dynamic]string
	it := docs
	for line_ in strings.split_iterator(&it, "\n") {
		line := strings.trim_right_space(line_)
		append(&lines, line)
	}

	curr_block_kind := Block_Kind.Paragraph
	start := 0
	blocks: [dynamic]Block

	for line, i in lines {
		text := strings.trim_space(line)
		switch curr_block_kind {
		case .Paragraph:
			if strings.has_prefix(line, "\t") {
				if i-start > 0 {
					append(&blocks, Block{curr_block_kind, lines[start:i]})
				}
				curr_block_kind, start = .Code, i
			} else if text == "" {
				if i-start > 0 {
					append(&blocks, Block{curr_block_kind, lines[start:i]})
				}
				start = i
			}
		case .Code:
			if text == "" || strings.has_prefix(line, "\t") {
				continue
			}
			if i-start > 0 {
				append(&blocks, Block{curr_block_kind, lines[start:i]})
			}
			curr_block_kind, start = .Paragraph, i
		}
	}
	if start < len(lines) {
		if len(lines)-start > 0 {
			append(&blocks, Block{curr_block_kind, lines[start:]})
		}
	}

	for block in &blocks {
		trim_amount := 0
		for trim_amount = 0; trim_amount < len(block.lines); trim_amount += 1 {
			line := block.lines[trim_amount]
			if strings.trim_space(line) != "" {
				break
			}
		}
		block.lines = block.lines[trim_amount:]
	}

	for block, i in blocks {
		if len(block.lines) == 0 {
			continue
		}
		prev_line := ""
		if i > 0 {
			prev_lines := blocks[i-1].lines
			if len(prev_lines) > 0 {
				prev_line = prev_lines[len(prev_lines)-1]
			}
		}
		prev_line = strings.trim_space(prev_line)

		lines := block.lines[:]
		end_line := block.lines[len(lines)-1]
		if block.kind == .Paragraph && i+1 < len(blocks) {
			if strings.has_prefix(end_line, "Example:") && blocks[i+1].kind == .Code {
				lines = lines[:len(lines)-1]
			}
		}

		switch block.kind {
		case .Paragraph:
			io.write_string(w, "

") for line, line_idx in lines { if line_idx > 0 { io.write_string(w, "\n") } io.write_string(w, line) } io.write_string(w, "

\n") case .Code: all_blank := len(lines) > 0 for line in lines { if strings.trim_space(line) != "" { all_blank = false } } if all_blank { continue } if strings.has_prefix(prev_line, "Example:") { io.write_string(w, "
\n") defer io.write_string(w, "
\n") io.write_string(w, "Example:\n") io.write_string(w, `
`)
				for line in lines {
					io.write_string(w, strings.trim_prefix(line, "\t"))
					io.write_string(w, "\n")
				}
				io.write_string(w, "
\n") } else { io.write_string(w, "
")
				for line in lines {
					io.write_string(w, strings.trim_prefix(line, "\t"))
					io.write_string(w, "\n")
				}
				io.write_string(w, "
\n") } } } } write_pkg_sidebar :: proc(w: io.Writer, curr_pkg: ^doc.Pkg, collection: ^Collection) { fmt.wprintln(w, ``) fmt.wprintln(w, `
`) defer fmt.wprintln(w, `
`) fmt.wprintf(w, "

%s Library

\n", collection.name) fmt.wprintln(w, `
    `) defer fmt.wprintln(w, `
`) for dir in collection.root.children { fmt.wprint(w, ``) if dir.pkg == curr_pkg { fmt.wprintf(w, `%s`, collection.base_url, dir.path, dir.name) } else if dir.pkg != nil { fmt.wprintf(w, `%s`, collection.base_url, dir.path, dir.name) } else { fmt.wprintf(w, "%s", dir.name) } if len(dir.children) != 0 { fmt.wprintln(w, "
    ") defer fmt.wprintln(w, "
\n") for child in dir.children { fmt.wprint(w, `
  • `) defer fmt.wprintln(w, `
  • `) if child.pkg == curr_pkg { fmt.wprintf(w, `%s`, collection.base_url, child.path, child.name) } else if child.pkg != nil { fmt.wprintf(w, `%s`, collection.base_url, child.path, child.name) } else { fmt.wprintf(w, "%s", child.name) } } } } } write_pkg :: proc(w: io.Writer, path: string, pkg: ^doc.Pkg, collection: ^Collection) { fmt.wprintln(w, `
    `) defer fmt.wprintln(w, `
    `) write_pkg_sidebar(w, pkg, collection) fmt.wprintln(w, `
    `) if false { // breadcrumbs fmt.wprintln(w, `
    `) defer fmt.wprintln(w, `
    `) fmt.wprintln(w, ``) io.write_string(w, "
      \n") defer io.write_string(w, "
    \n") fmt.wprintf(w, ``, collection.base_url) dirs := strings.split(path, "/") for dir, i in dirs { url := strings.join(dirs[:i+1], "/") short_path := strings.join(dirs[1:i+1], "/") a_class := "breadcrumb-link" is_curr := i+1 == len(dirs) if is_curr { io.write_string(w, `\n") } } fmt.wprintf(w, "

    package core:%s

    \n", path) overview_docs := strings.trim_space(str(pkg.docs)) if overview_docs != "" { fmt.wprintln(w, "

    Overview

    ") fmt.wprintln(w, "
    ") defer fmt.wprintln(w, "
    ") write_docs(w, pkg, overview_docs) } fmt.wprintln(w, `

    Index

    `) fmt.wprintln(w, `
    `) pkg_procs: [dynamic]^doc.Entity pkg_proc_groups: [dynamic]^doc.Entity pkg_types: [dynamic]^doc.Entity pkg_vars: [dynamic]^doc.Entity pkg_consts: [dynamic]^doc.Entity for entity_index in array(pkg.entities) { e := &entities[entity_index] name := str(e.name) if name == "" || name[0] == '_' { continue } switch e.kind { case .Invalid, .Import_Name, .Library_Name: // ignore case .Constant: append(&pkg_consts, e) case .Variable: append(&pkg_vars, e) case .Type_Name: append(&pkg_types, e) case .Procedure: append(&pkg_procs, e) case .Proc_Group: append(&pkg_proc_groups, e) } } entity_key :: proc(e: ^doc.Entity) -> string { return str(e.name) } slice.sort_by_key(pkg_procs[:], entity_key) slice.sort_by_key(pkg_proc_groups[:], entity_key) slice.sort_by_key(pkg_types[:], entity_key) slice.sort_by_key(pkg_vars[:], entity_key) slice.sort_by_key(pkg_consts[:], entity_key) write_index :: proc(w: io.Writer, name: string, entities: []^doc.Entity) { fmt.wprintln(w, `
    `) defer fmt.wprintln(w, `
    `) fmt.wprintf(w, `
    `+"\n", name) fmt.wprintf(w, ``+"\n", name) io.write_string(w, name) fmt.wprintln(w, ``) defer fmt.wprintln(w, `
    `) if len(entities) == 0 { io.write_string(w, "

    This section is empty.

    \n") } else { fmt.wprintln(w, "
      ") for e in entities { name := str(e.name) fmt.wprintf(w, "
    • {0:s}
    • \n", name) } fmt.wprintln(w, "
    ") } } entity_ordering := [?]struct{name: string, entities: []^doc.Entity} { {"Types", pkg_types[:]}, {"Constants", pkg_consts[:]}, {"Variables", pkg_vars[:]}, {"Procedures", pkg_procs[:]}, {"Procedure Groups", pkg_proc_groups[:]}, } for eo in entity_ordering { write_index(w, eo.name, eo.entities) } fmt.wprintln(w, "
    ") write_entity :: proc(w: io.Writer, e: ^doc.Entity) { write_attributes :: proc(w: io.Writer, e: ^doc.Entity) { for attr in array(e.attributes) { io.write_string(w, "@(") name := str(attr.name) value := str(attr.value) io.write_string(w, name) if value != "" { io.write_string(w, "=") io.write_string(w, value) } io.write_string(w, ")\n") } } pkg_index := files[e.pos.file].pkg pkg := &pkgs[pkg_index] writer := &Type_Writer{ w = w, pkg = pkg_index, } defer delete(writer.generic_scope) collection := pkg_to_collection[pkg] github_url := collection.github_url if collection != nil else GITHUB_CORE_URL name := str(e.name) path := pkg_to_path[pkg] filename := slashpath.base(str(files[e.pos.file].name)) fmt.wprintf(w, "

    {0:s}", name) fmt.wprintf(w, " ¶") if e.pos.file != 0 && e.pos.line > 0 { src_url := fmt.tprintf("%s/%s/%s#L%d", github_url, path, filename, e.pos.line) fmt.wprintf(w, "", src_url) } fmt.wprintf(w, "

    \n") fmt.wprintln(w, `
    `) switch e.kind { case .Invalid, .Import_Name, .Library_Name: // ignore case .Constant: fmt.wprint(w, `
    `)
    			the_type := types[e.type]
    
    			init_string := str(e.init_string)
    			assert(init_string != "")
    
    			ignore_type := true
    			if the_type.kind == .Basic && is_type_untyped(the_type) {
    			} else {
    				ignore_type = false
    				type_name := str(the_type.name)
    				if type_name != "" && strings.has_prefix(init_string, type_name) {
    					ignore_type = true
    				}
    			}
    
    			if ignore_type {
    				fmt.wprintf(w, "%s :: ", name)
    			} else {
    				fmt.wprintf(w, "%s: ", name)
    				write_type(writer, the_type, {.Allow_Indent})
    				fmt.wprintf(w, " : ")
    			}
    
    
    			io.write_string(w, init_string)
    			fmt.wprintln(w, "
    ") case .Variable: fmt.wprint(w, `
    `)
    			write_attributes(w, e)
    			fmt.wprintf(w, "%s: ", name)
    			write_type(writer, types[e.type], {.Allow_Indent})
    			init_string := str(e.init_string)
    			if init_string != "" {
    				io.write_string(w, " = ")
    				io.write_string(w, "…")
    			}
    			fmt.wprintln(w, "
    ") case .Type_Name: fmt.wprint(w, `
    `)
    			fmt.wprintf(w, "%s :: ", name)
    			the_type := types[e.type]
    			type_to_print := the_type
    			if the_type.kind == .Named && .Type_Alias not_in e.flags {
    				if e.pos == entities[array(the_type.entities)[0]].pos {
    					bt := base_type(the_type)
    					#partial switch bt.kind {
    					case .Struct, .Union, .Proc, .Enum:
    						// Okay
    					case:
    						io.write_string(w, "distinct ")
    					}
    					type_to_print = bt
    				}
    			}
    			write_type(writer, type_to_print, {.Allow_Indent})
    			fmt.wprintln(w, "
    ") case .Procedure: fmt.wprint(w, `
    `)
    			fmt.wprintf(w, "%s :: ", name)
    			write_type(writer, types[e.type], nil)
    			write_where_clauses(w, array(e.where_clauses))
    			fmt.wprint(w, " {…}")
    			fmt.wprintln(w, "
    ") case .Proc_Group: fmt.wprint(w, `
    `)
    			fmt.wprintf(w, "%s :: proc{{\n", name)
    			for entity_index in array(e.grouped_entities) {
    				this_proc := &entities[entity_index]
    				this_pkg := files[this_proc.pos.file].pkg
    				io.write_byte(w, '\t')
    				if this_pkg != pkg_index {
    					fmt.wprintf(w, "%s.", str(pkgs[this_pkg].name))
    				}
    				pkg := &pkgs[this_pkg]
    				collection := pkg_to_collection[pkg]
    				name := str(this_proc.name)
    				fmt.wprintf(w, ``, pkg_to_path[pkg], name, collection.base_url)
    				io.write_string(w, name)
    				io.write_string(w, ``)
    				io.write_byte(w, ',')
    				io.write_byte(w, '\n')
    			}
    			fmt.wprintln(w, "}")
    			fmt.wprintln(w, "
    ") } fmt.wprintln(w, `
    `) the_docs := strings.trim_space(str(e.docs)) if the_docs != "" { fmt.wprintln(w, `
    `) fmt.wprintln(w, ` `) write_docs(w, pkg, the_docs) fmt.wprintln(w, `
    `) } } write_entities :: proc(w: io.Writer, title: string, entities: []^doc.Entity) { fmt.wprintf(w, "

    {0:s}

    \n", title) fmt.wprintln(w, `
    `) if len(entities) == 0 { io.write_string(w, "

    This section is empty.

    \n") } else { for e in entities { fmt.wprintln(w, `
    `) write_entity(w, e) fmt.wprintln(w, `
    `) } } fmt.wprintln(w, "
    ") } for eo in entity_ordering { write_entities(w, eo.name, eo.entities) } fmt.wprintln(w, `

    Source Files

    `) fmt.wprintln(w, "
      ") any_hidden := false source_file_loop: for file_index in array(pkg.files) { file := files[file_index] filename := slashpath.base(str(file.name)) switch { case strings.has_suffix(filename, "_windows.odin"), strings.has_suffix(filename, "_darwin.odin"), strings.has_suffix(filename, "_essence.odin"), strings.has_suffix(filename, "_freebsd.odin"), strings.has_suffix(filename, "_wasi.odin"), strings.has_suffix(filename, "_js.odin"), strings.has_suffix(filename, "_freestanding.odin"), strings.has_suffix(filename, "_amd64.odin"), strings.has_suffix(filename, "_i386.odin"), strings.has_suffix(filename, "_arch64.odin"), strings.has_suffix(filename, "_wasm32.odin"), strings.has_suffix(filename, "_wasm64.odin"), false: any_hidden = true continue source_file_loop } fmt.wprintf(w, `
    • %s
    • `, collection.github_url, path, filename, filename) fmt.wprintln(w) } if any_hidden { fmt.wprintln(w, "
    • (hidden platform specific files)
    • ") } fmt.wprintln(w, "
    ") { fmt.wprintln(w, `

    Generation Information

    `) now := time.now() fmt.wprintf(w, "

    Generated with odin version %s (vendor %q) %s_%s @ %v

    \n", ODIN_VERSION, ODIN_VENDOR, ODIN_OS, ODIN_ARCH, now) } fmt.wprintln(w, `
    `) { write_link :: proc(w: io.Writer, id, text: string) { fmt.wprintf(w, `
  • %s`, id, text) } fmt.wprintln(w, ``) } }
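
/*
	Usage sketch (not part of the original source; the exact commands and output
	file names below are assumptions, shown only to illustrate how this generator
	is driven):

		odin doc core -all-packages -doc-format    // assumed: emit an .odin-doc metadata file
		odin_html_docs core.odin-doc               // hypothetical executable name for this package

	The program takes exactly one argument, the path to an .odin-doc file, and
	writes index.html plus per-package pages under core/ and vendor/ in the
	current directory.
*/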