read.odin

package encoding_hxa

import "core:fmt"
import "core:os"
import "core:mem"
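
// Errors that can be produced while parsing HxA data.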
Read_Error :: enum {
	None,
	Short_Read,
	Invalid_Data,
	Unable_To_Read_File,
}
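
// read_from_file loads the entire file at `filename` into memory and parses it
// with `read`. The raw bytes are kept in `file.backing`, since the parsed nodes
// reference slices of that buffer directly.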
read_from_file :: proc(filename: string, print_error := false, allocator := context.allocator) -> (file: File, err: Read_Error) {
	context.allocator = allocator
	data, ok := os.read_entire_file(filename)
	if !ok {
		err = .Unable_To_Read_File
		return
	}
	// If parsing fails, free the buffer; otherwise hand ownership of it to the
	// returned file.
	defer if err != nil {
		delete(data)
	} else {
		file.backing = data
	}
	file, err = read(data, filename, print_error, allocator)
	return
}
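
// read parses HxA data from a byte slice. `filename` is only used in error
// messages. Node, meta, and layer headers are allocated with `allocator`, while
// value arrays, names, and strings are slices into `data`, so the buffer must
// outlive the returned File.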
read :: proc(data: []byte, filename := "<input>", print_error := false, allocator := context.allocator) -> (file: File, err: Read_Error) {
	Reader :: struct {
		filename:    string,
		data:        []byte,
		offset:      int,
		print_error: bool,
	}
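
	// read_value copies a single value of type T from the current offset and
	// advances the reader past it.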
	read_value :: proc(r: ^Reader, $T: typeid) -> (value: T, err: Read_Error) {
		remaining := len(r.data) - r.offset
		if remaining < size_of(T) {
			if r.print_error {
				fmt.eprintf("file '%s' failed to read value at offset %v\n", r.filename, r.offset)
			}
			err = .Short_Read
			return
		}
		ptr := raw_data(r.data[r.offset:])
		value = (^T)(ptr)^
		r.offset += size_of(T)
		return
	}
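
	// read_array returns a slice of `count` values of type T that points directly
	// into the input buffer (no copy) and advances the reader past it.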
	read_array :: proc(r: ^Reader, $T: typeid, count: int) -> (value: []T, err: Read_Error) {
		remaining := len(r.data) - r.offset
		if remaining < size_of(T)*count {
			if r.print_error {
				fmt.eprintf("file '%s' failed to read array of %d elements at offset %v\n",
					r.filename, count, r.offset)
			}
			err = .Short_Read
			return
		}
		ptr := raw_data(r.data[r.offset:])
		value = mem.slice_ptr((^T)(ptr), count)
		r.offset += size_of(T)*count
		return
	}
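
	// Strings are stored as raw bytes with an explicit length; names are prefixed
	// with a single length byte.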
	read_string :: proc(r: ^Reader, count: int) -> (string, Read_Error) {
		buf, err := read_array(r, byte, count)
		return string(buf), err
	}

	read_name :: proc(r: ^Reader) -> (value: string, err: Read_Error) {
		len := read_value(r, u8) or_return
		data := read_array(r, byte, int(len)) or_return
		return string(data[:len]), nil
	}
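
	// read_meta reads `capacity` meta entries, recursing for nested .Meta values.
	// On error the returned slice is trimmed to the entries parsed so far, so the
	// caller can still free them.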
	read_meta :: proc(r: ^Reader, capacity: u32le) -> (meta_data: []Meta, err: Read_Error) {
		meta_data = make([]Meta, int(capacity))
		count := 0
		defer meta_data = meta_data[:count]

		for &m in meta_data {
			m.name = read_name(r) or_return
			type := read_value(r, Meta_Value_Type) or_return
			if type > max(Meta_Value_Type) {
				if r.print_error {
					fmt.eprintf("HxA Error: file '%s' has meta value type %d. Maximum value is %d\n",
						r.filename, u8(type), u8(max(Meta_Value_Type)))
				}
				err = .Invalid_Data
				return
			}
			array_length := read_value(r, u32le) or_return
			switch type {
			case .Int64:  m.value = read_array(r, i64le, int(array_length)) or_return
			case .Double: m.value = read_array(r, f64le, int(array_length)) or_return
			case .Node:   m.value = read_array(r, Node_Index, int(array_length)) or_return
			case .Text:   m.value = read_string(r, int(array_length)) or_return
			case .Binary: m.value = read_array(r, byte, int(array_length)) or_return
			case .Meta:   m.value = read_meta(r, array_length) or_return
			}
			count += 1
		}
		return
	}
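
	// read_layer_stack reads a stack of layers, each holding `capacity` items
	// times its component count. On error the stack is trimmed to the layers
	// parsed so far.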
	read_layer_stack :: proc(r: ^Reader, capacity: u32le) -> (layers: Layer_Stack, err: Read_Error) {
		stack_count := read_value(r, u32le) or_return
		layer_count := 0
		layers = make(Layer_Stack, stack_count)
		defer layers = layers[:layer_count]

		for &layer in layers {
			layer.name = read_name(r) or_return
			layer.components = read_value(r, u8) or_return
			type := read_value(r, Layer_Data_Type) or_return
			if type > max(Layer_Data_Type) {
				if r.print_error {
					fmt.eprintf("HxA Error: file '%s' has layer data type %d. Maximum value is %d\n",
						r.filename, u8(type), u8(max(Layer_Data_Type)))
				}
				err = .Invalid_Data
				return
			}
			data_len := int(layer.components) * int(capacity)
			switch type {
			case .Uint8:  layer.data = read_array(r, u8, data_len) or_return
			case .Int32:  layer.data = read_array(r, i32le, data_len) or_return
			case .Float:  layer.data = read_array(r, f32le, data_len) or_return
			case .Double: layer.data = read_array(r, f64le, data_len) or_return
			}
			layer_count += 1
		}
		return
	}
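
	// Body of `read`: validate the header, then parse each node.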
	if len(data) < size_of(Header) {
		if print_error {
			fmt.eprintf("HxA Error: file '%s' has no header\n", filename)
		}
		err = .Short_Read
		return
	}

	context.allocator = allocator

	header := cast(^Header)raw_data(data)
	if header.magic_number != MAGIC_NUMBER {
		if print_error {
			fmt.eprintf("HxA Error: file '%s' has invalid magic number 0x%x\n", filename, header.magic_number)
		}
		err = .Invalid_Data
		return
	}

	r := &Reader{
		filename    = filename,
		data        = data[:],
		offset      = size_of(Header),
		print_error = print_error,
	}
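
	// Parse the nodes in place. On error the partially parsed nodes are destroyed
	// and file.nodes is returned as nil; on success the slice is trimmed to the
	// number of nodes actually read.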
	node_count := 0
	file.header = header^
	file.nodes = make([]Node, header.internal_node_count)
	defer if err != nil {
		nodes_destroy(file.nodes)
		file.nodes = nil
	}
	defer file.nodes = file.nodes[:node_count]

	for _ in 0..<header.internal_node_count {
		node := &file.nodes[node_count]
		type := read_value(r, Node_Type) or_return
		if type > max(Node_Type) {
			if r.print_error {
				fmt.eprintf("HxA Error: file '%s' has node type %d. Maximum value is %d\n",
					r.filename, u8(type), u8(max(Node_Type)))
			}
			err = .Invalid_Data
			return
		}
		node_count += 1
		node.meta_data = read_meta(r, read_value(r, u32le) or_return) or_return

		switch type {
		case .Meta_Only:
			// Meta data only, nothing more to read for this node
		case .Geometry:
			g: Node_Geometry
			g.vertex_count = read_value(r, u32le) or_return
			g.vertex_stack = read_layer_stack(r, g.vertex_count) or_return
			g.edge_corner_count = read_value(r, u32le) or_return
			g.corner_stack = read_layer_stack(r, g.edge_corner_count) or_return
			if header.version > 2 {
				// The dedicated edge stack is only present in files with version > 2
				g.edge_stack = read_layer_stack(r, g.edge_corner_count) or_return
			}
			g.face_count = read_value(r, u32le) or_return
			g.face_stack = read_layer_stack(r, g.face_count) or_return
			node.content = g
		case .Image:
			img: Node_Image
			img.type = read_value(r, Image_Type) or_return
			dimensions := int(img.type)
			if img.type == .Image_Cube {
				dimensions = 2
			}
			img.resolution = {1, 1, 1}
			for d in 0..<dimensions {
				img.resolution[d] = read_value(r, u32le) or_return
			}
			size := img.resolution[0]*img.resolution[1]*img.resolution[2]
			if img.type == .Image_Cube {
				// Cube maps store six faces of the same resolution
				size *= 6
			}
			img.image_stack = read_layer_stack(r, size) or_return
			node.content = img
		}
	}
	return
}