소스 검색

added baked bytes support, vlm load/save

ncannasse 7 년 전
부모
커밋
0f03cbb550
2개의 변경된 파일에 96줄의 추가 그리고 27줄의 삭제
  1. 32 27
      h3d/scene/pbr/VolumetricLightmap.hx
  2. 64 0
      hxd/prefab/ContextShared.hx

+ 32 - 27
h3d/scene/pbr/VolumetricLightmap.hx

@@ -3,7 +3,6 @@ package h3d.scene.pbr;
 class VolumetricLightmap extends h3d.scene.Mesh {
 class VolumetricLightmap extends h3d.scene.Mesh {
 
 
 	public var lightProbes:Array<LightProbe> = [];
 	public var lightProbes:Array<LightProbe> = [];
-	public var lightProbeBuffer : h3d.Buffer;
 	public var lightProbeTexture : h3d.mat.Texture;
 	public var lightProbeTexture : h3d.mat.Texture;
 	public var shOrder : Int = 1;
 	public var shOrder : Int = 1;
 	public var voxelSize (default, set) : h3d.Vector;
 	public var voxelSize (default, set) : h3d.Vector;
@@ -34,6 +33,18 @@ class VolumetricLightmap extends h3d.scene.Mesh {
 		voxelSize = new h3d.Vector(1,1,1);
 		voxelSize = new h3d.Vector(1,1,1);
 	}
 	}
 
 
+	/** Total number of probes in the 3D grid (probeCount.x * y * z). */
+	public function getProbeCount() {
+		return probeCount.x * probeCount.y * probeCount.z;
+	}
+
+	// Release the GPU probe texture when this object leaves the scene,
+	// so repeated add/remove cycles do not leak texture memory.
+	override function onRemove() {
+		super.onRemove();
+		if( lightProbeTexture != null ) {
+			lightProbeTexture.dispose();
+			lightProbeTexture = null;
+		}
+	}
+
 	function set_voxelSize(newSize) :h3d.Vector {
 	function set_voxelSize(newSize) :h3d.Vector {
 		voxelSize = newSize;
 		voxelSize = newSize;
 		updateProbeCount();
 		updateProbeCount();
@@ -46,6 +57,25 @@ class VolumetricLightmap extends h3d.scene.Mesh {
 						Std.int(Math.max(1,Math.floor(scaleZ/voxelSize.z)) + 1));
 						Std.int(Math.max(1,Math.floor(scaleZ/voxelSize.z)) + 1));
 	}
 	}
 
 
+	/**
+		Restore baked probe data from `bytes` (zlib-compressed RGBA32F pixels).
+		Returns false (and uploads nothing) when the decompressed size does not
+		match the current grid: probeCount * shOrder^2 coefficients * 4 channels
+		* 4 bytes per float.
+		NOTE(review): `lightProbeTexture` is dereferenced without a null check —
+		calling load() before the texture has been allocated (packDataInsideTexture)
+		would crash; confirm callers guarantee the texture exists.
+	**/
+	public function load( bytes : haxe.io.Bytes ) {
+		bytes = haxe.zip.Uncompress.run(bytes);
+		var count = getProbeCount();
+		if( bytes.length != count * shOrder * shOrder * 4 * 4 )
+			return false;
+		lastBakedProbeIndex = count;
+		lightProbeTexture.uploadPixels(new hxd.Pixels(lightProbeTexture.width, lightProbeTexture.height, bytes, RGBA32F));
+		return true;
+	}
+
+	/**
+		Capture the probe texture pixels and return them zlib-compressed
+		(level 9). When nothing has been baked yet (no texture), returns a
+		compressed zero-length payload rather than null, so load()'s size
+		check will simply reject it.
+	**/
+	public function save() : haxe.io.Bytes {
+		var data;
+		if( lightProbeTexture == null )
+			data = haxe.io.Bytes.alloc(0);
+		else
+			data = lightProbeTexture.capturePixels().bytes;
+		return haxe.zip.Compress.run(data,9);
+	}
+
 	override function sync(ctx:RenderContext) {
 	override function sync(ctx:RenderContext) {
 		shader.ORDER = shOrder;
 		shader.ORDER = shOrder;
 		shader.SIZE = lightProbes.length * shader.ORDER * shader.ORDER;
 		shader.SIZE = lightProbes.length * shader.ORDER * shader.ORDER;
@@ -107,32 +137,6 @@ class VolumetricLightmap extends h3d.scene.Mesh {
 		return (localPos.x >= 0 && localPos.y >= 0 && localPos.z >= 0 && localPos.x <= 1 && localPos.y <= 1 && localPos.z <= 1);
 		return (localPos.x >= 0 && localPos.y >= 0 && localPos.z >= 0 && localPos.x <= 1 && localPos.y <= 1 && localPos.z <= 1);
 	}
 	}
 
 
-	// Pack data inside an Uniform Buffer
-	public function packData(){
-		var coefCount : Int = shOrder * shOrder;
-		var size = lightProbes.length;
-		lightProbeBuffer = new h3d.Buffer(size, 4 * coefCount, [UniformBuffer, Dynamic]);
-		var buffer = new hxd.FloatBuffer();
-		var probeIndex : Int = 0;
-		var dataIndex : Int = 0;
-
-		buffer.resize(size * 4);
-
-		while(probeIndex < lightProbes.length) {
-			var index = probeIndex * coefCount * 4;
-			for(i in 0... coefCount) {
-				buffer[index + i * 4 + 0] = lightProbes[probeIndex].sh.coefR[i];
-				buffer[index + i * 4 + 1] = lightProbes[probeIndex].sh.coefG[i];
-				buffer[index + i * 4 + 2] = lightProbes[probeIndex].sh.coefB[i];
-				buffer[index + i * 4 + 3] = 0;
-			}
-			++probeIndex;
-		}
-
-		lightProbeBuffer.uploadVector(buffer, 0, size, 0);
-		shader.lightProbeBuffer = lightProbeBuffer;
-	}
-
 	// Pack data inside a 2D texture
 	// Pack data inside a 2D texture
 	public function packDataInsideTexture(){
 	public function packDataInsideTexture(){
 		var coefCount : Int = shOrder * shOrder;
 		var coefCount : Int = shOrder * shOrder;
@@ -140,6 +144,7 @@ class VolumetricLightmap extends h3d.scene.Mesh {
 		var sizeY = probeCount.y * probeCount.z;
 		var sizeY = probeCount.y * probeCount.z;
 
 
 		if(lightProbeTexture == null || lightProbeTexture.width != sizeX || lightProbeTexture.height != sizeY){
 		if(lightProbeTexture == null || lightProbeTexture.width != sizeX || lightProbeTexture.height != sizeY){
+			if( lightProbeTexture != null ) lightProbeTexture.dispose();
 			lightProbeTexture = new h3d.mat.Texture(sizeX, sizeY, [Dynamic], RGBA32F);
 			lightProbeTexture = new h3d.mat.Texture(sizeX, sizeY, [Dynamic], RGBA32F);
 			lightProbeTexture.filter = Nearest;
 			lightProbeTexture.filter = Nearest;
 		}
 		}

+ 64 - 0
hxd/prefab/ContextShared.hx

@@ -13,8 +13,11 @@ class ContextShared {
 	public var contexts : Map<Prefab,Context>;
 	public var contexts : Map<Prefab,Context>;
 	public var references : Map<Prefab,Array<Context>>;
 	public var references : Map<Prefab,Array<Context>>;
 	public var cleanups : Array<Void->Void>;
 	public var cleanups : Array<Void->Void>;
+	public var currentPath : String;
+
 	var cache : h3d.prim.ModelCache;
 	var cache : h3d.prim.ModelCache;
 	var shaderCache : ShaderDefCache;
 	var shaderCache : ShaderDefCache;
+	var bakedData : Map<String, haxe.io.Bytes>;
 
 
 	public function new() {
 	public function new() {
 		root2d = new h2d.Sprite();
 		root2d = new h2d.Sprite();
@@ -67,4 +70,65 @@ class ContextShared {
 		return cache.loadTexture(null, path);
 		return cache.loadTexture(null, path);
 	}
 	}
 
 
+	// Return the baked payload stored under `file`, lazily parsing the
+	// on-disk ".bake" archive on first access. Returns null when the
+	// entry does not exist.
+	public function loadBakedBytes( file : String ) {
+		if( bakedData == null ) loadBakedData();
+		return bakedData.get(file);
+	}
+
+	/**
+		Store (or remove, when `bytes` is null) the payload for `file` in the
+		baked-data map, then serialize the whole archive and hand it to
+		saveBakedFile(). Archive layout: "BAKE" magic, int32 entry count, then
+		one header per entry (uint16 name length, name, int32 data position,
+		int32 data length), followed by the raw payloads back to back.
+		NOTE(review): the same `headerSize` offset is written for every entry —
+		with more than one key all headers point at the first payload. The loop
+		looks like it is missing `headerSize += bakedData.get(name).length;`
+		after writing each header; confirm against loadBakedData(), which reads
+		a distinct position per entry.
+		NOTE(review): writeUInt16(name.length) counts characters while
+		writeString emits UTF-8 bytes — a non-ASCII name would desync the
+		header; presumably names are ASCII resource paths, verify.
+		NOTE(review): the local `var bytes` (BytesOutput) shadows the `bytes`
+		parameter, which hurts readability.
+	**/
+	public function saveBakedBytes( file : String, bytes : haxe.io.Bytes ) {
+		if( bakedData == null ) loadBakedData();
+		if( bytes == null )
+			bakedData.remove(file);
+		else
+			bakedData.set(file, bytes);
+		var bytes = new haxe.io.BytesOutput();
+		bytes.writeString("BAKE");
+		var keys = Lambda.array({ iterator : bakedData.keys });
+		bytes.writeInt32(keys.length);
+		var headerSize = 8;
+		for( name in keys )
+			headerSize += 2 + name.length + 8;
+		for( name in keys ) {
+			bytes.writeUInt16(name.length);
+			bytes.writeString(name);
+			bytes.writeInt32(headerSize);
+			bytes.writeInt32(bakedData.get(name).length);
+		}
+		for( name in keys )
+			bytes.write(bakedData.get(name));
+		saveBakedFile(bytes.getBytes());
+	}
+
+	// Persistence hook: subclasses override this to actually write the
+	// serialized archive; the base implementation always throws.
+	function saveBakedFile( bytes : haxe.io.Bytes ) {
+		throw "Don't know how to save baked file";
+	}
+
+	// Read the ".bake" sibling of currentPath through the resource loader.
+	// Returns null when the file does not exist (NotFound is swallowed
+	// deliberately — a missing bake file just means nothing was baked yet).
+	function loadBakedFile() {
+		var path = new haxe.io.Path(currentPath);
+		path.ext = "bake";
+		return try hxd.res.Loader.currentInstance.load(path.toString()).entry.getBytes() catch( e : hxd.res.NotFound ) null;
+	}
+
+	/**
+		Parse the "BAKE" archive (see saveBakedBytes for the layout) into the
+		`bakedData` map. Leaves the map empty when no bake file exists; throws
+		on a bad magic header. Each entry header carries an absolute byte
+		position and length, so payloads are sliced directly out of `data`.
+	**/
+	function loadBakedData() {
+		bakedData = new Map();
+		var data = loadBakedFile();
+		if( data == null )
+			return;
+		if( data.getString(0,4) != "BAKE" )
+			throw "Invalid bake file";
+		var count = data.getInt32(4);
+		var pos = 8;
+		for( i in 0...count ) {
+			var len = data.getUInt16(pos);
+			pos += 2;
+			var name = data.getString(pos, len);
+			pos += len;
+			var bytesPos = data.getInt32(pos);
+			pos += 4;
+			var bytesLen = data.getInt32(pos);
+			pos += 4;
+			bakedData.set(name,data.sub(bytesPos,bytesLen));
+		}
+	}
+
 }
 }