Browse Source

All reads now skip the stream if the data is already in memory.

Jeroen van Rijn 4 years ago
parent
commit
342adb627d
4 changed files with 52 additions and 51 deletions
  1. +1 −3
      core/compress/common.odin
  2. +8 −1
      core/compress/gzip/example.odin
  3. +32 −36
      core/compress/gzip/gzip.odin
  4. +11 −11
      core/image/png/png.odin

+ 1 - 3
core/compress/common.odin

@@ -10,7 +10,6 @@ package compress
 
 import "core:io"
 import "core:image"
-// import "core:fmt"
 
 // when #config(TRACY_ENABLE, false) { import tracy "shared:odin-tracy" }
 
@@ -148,7 +147,7 @@ read_slice :: #force_inline proc(z: ^Context, size: int) -> (res: []u8, err: io.
 			return []u8{}, .Short_Buffer;
 		}
 	}
-	// fmt.printf("read_slice of %v bytes fell back to stream.\n", size);
+
 	/*
 		TODO: Try to refill z.input_data from stream, using packed_data as a guide.
 	*/
@@ -190,7 +189,6 @@ peek_data :: #force_inline proc(z: ^Context, $T: typeid) -> (res: T, err: io.Err
 
 	if len(z.input_data) >= size {
 		buf := z.input_data[:size];
-		z.input_data = z.input_data[size:];
 		return (^T)(&buf[0])^, .None;
 	}
 

+ 8 - 1
core/compress/gzip/example.odin

@@ -14,6 +14,8 @@ package gzip
 
 import "core:bytes"
 import "core:os"
+import "core:compress"
+import "core:fmt"
 
 // Small GZIP file with fextra, fname and fcomment present.
 @private
@@ -48,6 +50,8 @@ main :: proc() {
 			stdout("Displaying test vector: ");
 			stdout(bytes.buffer_to_string(&buf));
 			stdout("\n");
+		} else {
+			fmt.printf("gzip.load returned %v\n", err);
 		}
 		bytes.buffer_destroy(&buf);
 		os.exit(0);
@@ -61,7 +65,10 @@ main :: proc() {
 		if file == "-" {
 			// Read from stdin
 			s := os.stream_from_handle(os.stdin);
-			err = load(s, &buf);
+			ctx := &compress.Context{
+				input = s,
+			};
+			err = load(ctx, &buf);
 		} else {
 			err = load(file, &buf);
 		}

+ 32 - 36
core/compress/gzip/gzip.odin

@@ -21,11 +21,6 @@ import "core:io"
 import "core:bytes"
 import "core:hash"
 
-/*
-
-
-*/
-
 Magic :: enum u16le {
 	GZIP = 0x8b << 8 | 0x1f,
 }
@@ -110,7 +105,13 @@ load_from_slice :: proc(slice: []u8, buf: ^bytes.Buffer, allocator := context.al
 	bytes.reader_init(&r, slice);
 	stream := bytes.reader_to_stream(&r);
 
-	err = load_from_stream(stream, buf, allocator);
+	ctx := &compress.Context{
+		input  = stream,
+		input_data = slice,
+		input_fully_in_memory = true,
+		input_refills_from_stream = true,
+	};
+	err = load_from_stream(ctx, buf, allocator);
 
 	return err;
 }
@@ -126,15 +127,14 @@ load_from_file :: proc(filename: string, buf: ^bytes.Buffer, allocator := contex
 	return;
 }
 
-load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := context.allocator) -> (err: Error) {
-	ctx := compress.Context{
-		input  = stream,
-	};
+load_from_stream :: proc(ctx: ^compress.Context, buf: ^bytes.Buffer, allocator := context.allocator) -> (err: Error) {
 	buf := buf;
 	ws := bytes.buffer_to_stream(buf);
 	ctx.output = ws;
 
-	header, e := compress.read_data(&ctx, Header);
+	b: []u8;
+
+	header, e := compress.read_data(ctx, Header);
 	if e != .None {
 		return E_General.File_Too_Short;
 	}
@@ -162,7 +162,7 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 	// printf("os: %v\n", OS_Name[header.os]);
 
 	if .extra in header.flags {
-		xlen, e_extra := compress.read_data(&ctx, u16le);
+		xlen, e_extra := compress.read_data(ctx, u16le);
 		if e_extra != .None {
 			return E_General.Stream_Too_Short;
 		}
@@ -178,14 +178,14 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 
 		for xlen >= 4 {
 			// println("Parsing Extra field(s).");
-			field_id, field_error = compress.read_data(&ctx, [2]u8);
+			field_id, field_error = compress.read_data(ctx, [2]u8);
 			if field_error != .None {
 				// printf("Parsing Extra returned: %v\n", field_error);
 				return E_General.Stream_Too_Short;
 			}
 			xlen -= 2;
 
-			field_length, field_error = compress.read_data(&ctx, u16le);
+			field_length, field_error = compress.read_data(ctx, u16le);
 			if field_error != .None {
 				// printf("Parsing Extra returned: %v\n", field_error);
 				return E_General.Stream_Too_Short;
@@ -200,8 +200,7 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 
 			// printf("    Field \"%v\" of length %v found: ", string(field_id[:]), field_length);
 			if field_length > 0 {
-				field_data := make([]u8, field_length, context.temp_allocator);
-				_, field_error = ctx.input->impl_read(field_data);
+				b, field_error = compress.read_slice(ctx, int(field_length));
 				if field_error != .None {
 					// printf("Parsing Extra returned: %v\n", field_error);
 					return E_General.Stream_Too_Short;
@@ -220,16 +219,15 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 	if .name in header.flags {
 		// Should be enough.
 		name: [1024]u8;
-		b: [1]u8;
 		i := 0;
 		name_error: io.Error;
 
 		for i < len(name) {
-			_, name_error = ctx.input->impl_read(b[:]);
+			b, name_error = compress.read_slice(ctx, 1);
 			if name_error != .None {
 				return E_General.Stream_Too_Short;
 			}
-			if b == 0 {
+			if b[0] == 0 {
 				break;
 			}
 			name[i] = b[0];
@@ -244,16 +242,15 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 	if .comment in header.flags {
 		// Should be enough.
 		comment: [1024]u8;
-		b: [1]u8;
 		i := 0;
 		comment_error: io.Error;
 
 		for i < len(comment) {
-			_, comment_error = ctx.input->impl_read(b[:]);
+			b, comment_error = compress.read_slice(ctx, 1);
 			if comment_error != .None {
 				return E_General.Stream_Too_Short;
 			}
-			if b == 0 {
+			if b[0] == 0 {
 				break;
 			}
 			comment[i] = b[0];
@@ -266,9 +263,8 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 	}
 
 	if .header_crc in header.flags {
-		crc16: [2]u8;
 		crc_error: io.Error;
-		_, crc_error = ctx.input->impl_read(crc16[:]);
+		_, crc_error = compress.read_slice(ctx, 2);
 		if crc_error != .None {
 			return E_General.Stream_Too_Short;
 		}
@@ -284,29 +280,29 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 	code_buffer := compress.Code_Buffer{};
 	cb := &code_buffer;
 
-	zlib_error := zlib.inflate_raw(&ctx, &code_buffer);
-
-	// fmt.printf("ZLIB returned: %v\n", zlib_error);
-
+	zlib_error := zlib.inflate_raw(ctx, &code_buffer);
 	if zlib_error != nil {
 		return zlib_error;
 	}
-
 	/*
 		Read CRC32 using the ctx bit reader because zlib may leave bytes in there.
 	*/
 	compress.discard_to_next_byte_lsb(cb);
 
+	footer_error: io.Error;
+
 	payload_crc_b: [4]u8;
-	payload_len_b: [4]u8;
 	for _, i in payload_crc_b {
-		payload_crc_b[i] = u8(compress.read_bits_lsb(&ctx, cb, 8));
+		if cb.num_bits >= 8 {
+			payload_crc_b[i] = u8(compress.read_bits_lsb(ctx, cb, 8));
+		} else {
+			payload_crc_b[i], footer_error = compress.read_u8(ctx);
+		}
 	}
 	payload_crc := transmute(u32le)payload_crc_b;
-	for _, i in payload_len_b {
-		payload_len_b[i] = u8(compress.read_bits_lsb(&ctx, cb, 8));
-	}
-	payload_len := int(transmute(u32le)payload_len_b);
+
+	payload_len: u32le;
+	payload_len, footer_error = compress.read_data(ctx, u32le);
 
 	payload := bytes.buffer_to_bytes(buf);
 	crc32 := u32le(hash.crc32(payload));
@@ -315,7 +311,7 @@ load_from_stream :: proc(stream: io.Stream, buf: ^bytes.Buffer, allocator := con
 		return E_GZIP.Payload_CRC_Invalid;
 	}
 
-	if len(payload) != payload_len {
+	if len(payload) != int(payload_len) {
 		return E_GZIP.Payload_Length_Invalid;
 	}
 	return nil;

+ 11 - 11
core/image/png/png.odin

@@ -252,17 +252,15 @@ read_chunk :: proc(ctx: ^compress.Context) -> (chunk: Chunk, err: Error) {
 	}
 	chunk.header = ch;
 
-	data := make([]u8, ch.length, context.temp_allocator);
-	_, e2 := ctx.input->impl_read(data);
-	if e2 != .None {
+	chunk.data, e = compress.read_slice(ctx, int(ch.length));
+	if e != .None {
 		return {}, E_General.Stream_Too_Short;
 	}
-	chunk.data = data;
 
 	// Compute CRC over chunk type + data
 	type := (^[4]byte)(&ch.type)^;
 	computed_crc := hash.crc32(type[:]);
-	computed_crc =  hash.crc32(data, computed_crc);
+	computed_crc =  hash.crc32(chunk.data, computed_crc);
 
 	crc, e3 := compress.read_data(ctx, u32be);
 	if e3 != .None {
@@ -359,12 +357,18 @@ load_from_slice :: proc(slice: []u8, options := Options{}, allocator := context.
 	bytes.reader_init(&r, slice);
 	stream := bytes.reader_to_stream(&r);
 
+	ctx := &compress.Context{
+		input = stream,
+		input_data = slice,
+		input_fully_in_memory = true,
+	};
+
 	/*
 		TODO: Add a flag to tell the PNG loader that the stream is backed by a slice.
 		This way the stream reader could avoid the copy into the temp memory returned by it,
 		and instead return a slice into the original memory that's already owned by the caller.
 	*/
-	img, err = load_from_stream(stream, options, allocator);
+	img, err = load_from_stream(ctx, options, allocator);
 
 	return img, err;
 }
@@ -382,7 +386,7 @@ load_from_file :: proc(filename: string, options := Options{}, allocator := cont
 	}
 }
 
-load_from_stream :: proc(stream: io.Stream, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
+load_from_stream :: proc(ctx: ^compress.Context, options := Options{}, allocator := context.allocator) -> (img: ^Image, err: Error) {
 	options := options;
 	if .info in options {
 		options |= {.return_metadata, .do_not_decompress_image};
@@ -405,10 +409,6 @@ load_from_stream :: proc(stream: io.Stream, options := Options{}, allocator := c
 	img.metadata_ptr  = info;
 	img.metadata_type = typeid_of(Info);
 
-	ctx := &compress.Context{
-		input = stream,
-	};
-
 	signature, io_error := compress.read_data(ctx, Signature);
 	if io_error != .None || signature != .PNG {
 		return img, E_PNG.Invalid_PNG_Signature;