
WIP: Updating & cleaning up framework docs

BearishSun committed 7 years ago (commit 8c170d61bf)

66 changed files with 623 additions and 417 deletions
  1. +1 -1    Documentation/Doxygen/NativeLayout.xml
  2. +2 -2    Documentation/Manuals/Native/User/advancedAnimation.md
  3. +2 -2    Documentation/Manuals/Native/User/advancedRtti.md
  4. +0 -67   Documentation/Manuals/Native/User/advancedStartup.md
  5. +8 -6    Documentation/Manuals/Native/User/advancedTextures.md
  6. +1 -1    Documentation/Manuals/Native/User/animation.md
  7. +5 -5    Documentation/Manuals/Native/User/animationClip.md
  8. +13 -1   Documentation/Manuals/Native/User/cameras.md
  9. +1 -1    Documentation/Manuals/Native/User/colliders.md
  10. +11 -11  Documentation/Manuals/Native/User/creatingMeshes.md
  11. +8 -6    Documentation/Manuals/Native/User/customComponents.md
  12. +1 -1    Documentation/Manuals/Native/User/fileSystem.md
  13. +4 -4    Documentation/Manuals/Native/User/guiElements.md
  14. +3 -3    Documentation/Manuals/Native/User/guiLayouts.md
  15. +23 -26  Documentation/Manuals/Native/User/guiSetup.md
  16. +3 -3    Documentation/Manuals/Native/User/guiStyles.md
  17. +0 -78   Documentation/Manuals/Native/User/imageBasedLighting.md
  18. +1 -1    Documentation/Manuals/Native/User/importingAudio.md
  19. +1 -1    Documentation/Manuals/Native/User/importingFonts.md
  20. +54 -0   Documentation/Manuals/Native/User/indirectLighting.md
  21. +1 -1    Documentation/Manuals/Native/User/joints.md
  22. +1 -1    Documentation/Manuals/Native/User/lights.md
  23. +1 -1    Documentation/Manuals/Native/User/logging.md
  24. +3 -3    Documentation/Manuals/Native/User/math.md
  25. +3 -3    Documentation/Manuals/Native/User/memory.md
  26. +92 -0   Documentation/Manuals/Native/User/nonComponentApproach.md
  27. +4 -4    Documentation/Manuals/Native/User/offscreenRendering.md
  28. +1 -1    Documentation/Manuals/Native/User/physicalMaterial.md
  29. +2 -2    Documentation/Manuals/Native/User/physicsMesh.md
  30. +1 -1    Documentation/Manuals/Native/User/profiling.md
  31. +48 -0   Documentation/Manuals/Native/User/reflectionProbes.md
  32. +69 -9   Documentation/Manuals/Native/User/renderSettings.md
  33. +3 -3    Documentation/Manuals/Native/User/rigidbodies.md
  34. +90 -0   Documentation/Manuals/Native/User/savingScene.md
  35. +18 -18  Documentation/Manuals/Native/User/serializingObjects.md
  36. +32 -0   Documentation/Manuals/Native/User/skybox.md
  37. +3 -3    Documentation/Manuals/Native/User/smartPointers.md
  38. +3 -3    Documentation/Manuals/Native/User/spriteTextures.md
  39. +42 -6   Documentation/Manuals/Native/User/strings.md
  40. +2 -2    Documentation/Manuals/Native/User/surfaceShaders.md
  41. +1 -1    Documentation/Manuals/Native/User/time.md
  42. +3 -5    Documentation/Manuals/Native/User/windows.md
  43. +1 -1    Documentation/Manuals/Native/advMemAlloc.md
  44. +3 -3    Documentation/Manuals/Native/apiRefPages.md
  45. +7 -7    Documentation/Manuals/Native/architecture.md
  46. +2 -2    Documentation/Manuals/Native/codeStyle.md
  47. +1 -1    Documentation/Manuals/Native/commandBuffers.md
  48. +1 -1    Documentation/Manuals/Native/coreThread.md
  49. +2 -2    Documentation/Manuals/Native/customGui.md
  50. +1 -1    Documentation/Manuals/Native/customImporters.md
  51. +1 -1    Documentation/Manuals/Native/customRenderer.md
  52. +0 -1    Documentation/Manuals/Native/devManuals.md
  53. +4 -4    Documentation/Manuals/Native/gettingStarted.md
  54. +1 -1    Documentation/Manuals/Native/gpuBuffers.md
  55. +3 -3    Documentation/Manuals/Native/gpuPrograms.md
  56. +11 -10  Documentation/Manuals/Native/manuals.md
  57. +1 -1    Documentation/Manuals/Native/nonProgrammableStates.md
  58. +2 -2    Documentation/Manuals/Native/plugins.md
  59. +0 -72   Documentation/Manuals/Native/porting.md
  60. +1 -1    Documentation/Manuals/Native/quickref.md
  61. +1 -1    Documentation/Manuals/Native/rendererExtensions.md
  62. +7 -7    Source/BansheeCore/RenderAPI/BsRenderAPI.cpp
  63. +4 -1    Source/BansheeCore/Renderer/BsLightProbeVolume.h
  64. +1 -1    Source/BansheeCore/Renderer/BsRenderSettings.cpp
  65. +0 -3    Source/BansheeCore/Resources/BsResources.cpp
  66. +3 -3    Source/BansheeCore/Scene/BsSceneManager.h

+ 1 - 1
Documentation/Doxygen/NativeLayout.xml

@@ -4,7 +4,7 @@
  <navindex>
    <tab type="mainpage" visible="yes" title=""/>
	<tab type="user" url="manuals.html" title="User manuals"/>
-	<tab type="user" url="devManuals.html" title="Dev. manuals"/>
+	<tab type="user" url="dev_manuals.html" title="Dev. manuals"/>
    <tab type="user" url="group___layers.html" title="Core API"/>
	<tab type="user" url="group___internals.html" title="Internals"/>
	<tab type="user" url="group___plugins.html" title="Plugins"/>

+ 2 - 2
Documentation/Manuals/Native/User/advancedAnimation.md

@@ -1,7 +1,7 @@
 Advanced animation				{#advancedAnimation}
 ===============
 
-So far we have shown how you can use the **CAnimation** component to play a single animation clip at once. When it comes to skeletal animation the system is capable of a few more advanced features that allow you to play and blend multiple animation clips at once, as well as other advanced functionality.
+So far we have shown how you can use the **Animation** component to play a single animation clip at once. When it comes to skeletal animation the system is capable of a few more advanced features that allow you to play and blend multiple animation clips at once, as well as other advanced functionality.
 
 # Cross fade
 Often your animation system might need to transition from playing one animation clip to another (e.g. moving from a walk to a run animation). In that case you can use @ref bs::CAnimation::crossFade "CAnimation::crossFade()" instead of **CAnimation::play()**. Cross fade will slowly fade in the new animation clip while fading out the previously playing one, over the specified time period.
@@ -57,7 +57,7 @@ animation->blend1D(blendInfo, 0.75f);
 ## 2D blending
 2D blending works similarly as 1D blending, except it uses a two-dimensional value for the weight. It also interpolates between four animation clips at once instead of two. It is limited to four clips, and a [0, 1] range. Therefore the clips can be imagined as being on corners of a square.
 
-For example you can use this blend type to interpolate between character aiming animations in a third party game. You would adjust the 2D weight based on the vertical and horizontal directions the character is aiming and the system would interpolate between look up/down/left/right animations to animate the character in the direction the user is aiming.
+For example you can use this blend type to interpolate between character aiming animations in a third person game. You would adjust the 2D weight based on the vertical and horizontal directions the character is aiming and the system would interpolate between look up/down/left/right animations to animate the character in the direction the user is aiming.
 
 To start a 2D blend operation call @ref bs::CAnimation::blend2D "CAnimation::blend2D()". This method expects a @ref bs::Blend2DInfo "Blend2DInfo" structure, containing four animation clips to blend between, as well as a 2D weight determining how the animations are blended together.
 

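The cross fade call described above is a one-liner. A minimal sketch, assuming an existing `animation` component handle and an illustrative clip path:

~~~~~~~~~~~~~{.cpp}
// Fade from the currently playing clip into a run clip over half a second
HAnimationClip runClip = gResources().load<AnimationClip>("Run.asset");
animation->crossFade(runClip, 0.5f);
~~~~~~~~~~~~~
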
+ 2 - 2
Documentation/Manuals/Native/User/advancedRtti.md

@@ -185,7 +185,7 @@ template<> struct RTTIPlainType<std::string>
 }; 
 ~~~~~~~~~~~~~
 
-> Note: Banshee already provides many of such specializations, including ones for strings, vectors and maps.
+> Note: bs::f already provides many of such specializations, including ones for strings, vectors and maps.
 
 Each specialization must implement all three **toMemory()**, **fromMemory()** and **getDynamicSize()** methods. It must also provide a flag **hasDynamicSize** which determines whether or not it has dynamic size. Any structure whose size varies with each instance (like a string) must set this flag to true. You must also set it to true if the size is static but larger than 255 bytes.
 
@@ -211,7 +211,7 @@ BS_ALLOW_MEMCPY_SERIALIZATION(SimpleData)
 - @ref bs::rttiGetElemSize "rttiGetElemSize()" - Returns a size an object
 
 ~~~~~~~~~~~~~{.cpp}
-// Assuming Vector has a RTTIPlainType<T> specialization (which it has, Banshee provides it by default)
+// Assuming Vector has a RTTIPlainType<T> specialization (which it has, bs::f provides it by default)

 Vector<SimpleData> myData;
 // fill out myData

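// Illustrative continuation of the snippet above (exact helper signature assumed):
// query how many bytes the vector will occupy when serialized.
UINT32 serializedSize = rttiGetElemSize(myData);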
+ 0 - 67
Documentation/Manuals/Native/User/advancedStartup.md

@@ -1,67 +0,0 @@
-Advanced startup				{#advancedStartup}
-===============
-
-We have already shown how to perform basic start-up of a Banshee application.
-
-~~~~~~~~~~~~~{.cpp}
-Application::startUp(VideoMode(1280, 720), "My app", false);
-
-// Set up your scene here
-
-Application::instance().runMainLoop();
-Application::shutDown();
-~~~~~~~~~~~~~
-
-This form of start-up is adequate for applications using only the scene object/component system for implementing game logic. Applications wishing to implement systems that aren't components must use a more advanced form of start up. Implementing systems in such a way is necessary when extending the engine with new features, using low level rendering API, or if its just a preference.
-
-To perform advanced start-up you must create your own version of the **Application** class by deriving from it. Once derived you can override any of the following methods:
- - @ref bs::Application::onStartUp "Application::onStartUp()" - Called when the application is first starting up.
- - @ref bs::Application::preUpdate "Application::preUpdate()" - Called every frame, just before component and plugin updates are triggered.
- - @ref bs::Application::postUpdate "Application::postUpdate()" - Called every frame, after component and plugin updates are triggered.
- - @ref bs::Application::onShutDown "Application::onShutDown()" - Called just before the application is about to shut down.
- 
-~~~~~~~~~~~~~{.cpp}
-class MyApplication : public Application
-{
-public:
-	// Pass along the start-up structure to the parent
-	MyApplication(const START_UP_DESC& desc)
-		:Application(desc)
-	{ }
-
-private:
-	void onStartUp() override
-	{
-		// Ensure all parent systems are initialized first
-		Application::onStartUp();
-
-		// Do your own initialization (i.e. start your custom systems)
-	}
-
-	void onShutDown() override
-	{
-		// Do your own shut-down (i.e. shut down your custom systems)
-
-		// Shut-down engine components
-		Application::onShutDown();
-	}
-
-	void preUpdate() override
-	{
-		// Execute per-frame logic of your custom systems (optionally do it in postUpdate)
-	}
-};
-~~~~~~~~~~~~~
-
-Once you have your own application override, you can now start the application by calling @ref bs::Application::startUp<T> "Application::startUp<T>()" with the template parameter being your application override class. Everything else regarding start-up remains the same.
-
-~~~~~~~~~~~~~{.cpp}
-Application::startUp<MyApplication>(VideoMode(1280, 720), "My app", false);
-
-// Set up your scene here
-
-Application::instance().runMainLoop();
-Application::shutDown();
-~~~~~~~~~~~~~
-
-Take a look at the *ExampleLowLevelRendering* for a working example of how to use advanced start-up in order to perform low-level rendering.

+ 8 - 6
Documentation/Manuals/Native/User/advancedTextures.md

@@ -1,10 +1,10 @@
-Creating textures				{#advancedTextures}
+Advanced textures				{#advancedTextures}
 ===============
 
-In this manual we'll learn how to create textures manually, modify their contents and even read-back texture data.
+In this manual we'll learn how to create textures manually, modify their contents and even read-back texture data that was written on the GPU.
 
 # Creating textures
-To create a texture call @ref bs::Texture::create "Texture::create". You'll need to populate the @ref bs::TEXTURE_DESC "TEXTURE_DESC" structure and pass it as a parameter. The structure requires you to populate these properties at minimum:
+To create a texture call @ref bs::Texture::create "Texture::create()". You'll need to populate the @ref bs::TEXTURE_DESC "TEXTURE_DESC" structure and pass it as a parameter. The structure requires you to populate these properties at minimum:
 - @ref bs::TEXTURE_DESC::type "TEXTURE_DESC::type" - Allows you to choose between 1D/2D/3D or cube-map textures using the @ref bs::TextureType "TextureType" enum
 - @ref bs::TEXTURE_DESC::format "TEXTURE_DESC::format" - Allows you to choose a format for each individual pixel in the texture, using the @ref bs::PixelFormat "PixelFormat" enum
 - @ref bs::TEXTURE_DESC::width "TEXTURE_DESC::width" - Width of the texture, in pixels
@@ -20,7 +20,7 @@ When it comes to texture types there four kinds of textures:
 You may also set these optional properties:
 - @ref bs::TEXTURE_DESC::numMips "TEXTURE_DESC::numMips" - A texture with mip-maps will contain a set of scaled down versions of itself that are used by the GPU for anti-aliasing. Specify zero to use no mip maps. You can use the helper function @ref bs::PixelUtil::getMaxMipmaps "PixelUtil::getMaxMipmaps()" to return the maximum possible mip-map count for a specific set of dimensions. 
 - @ref bs::TEXTURE_DESC::numArraySlices "TEXTURE_DESC::numArraySlices" - Specify number higher than 1 in order to create an array of textures. This is primarily used for low-level rendering purposes. Texture arrays are not supported for 3D textures.
- - @ref bs::TEXTURE_DESC::hwGamma "TEXTURE_DESC::hwGamma" - When true, it specifies if the data in the texture is gamma corrected. When performing reads on such texture (e.g. in the shader) the GPU will transform the texture data back to linear space before returning the value. When a texture is used as a render target, the GPU will automatically convert from linear space into gamma space when rendering to the texture. Only relevant for 2D textures.
+ - @ref bs::TEXTURE_DESC::hwGamma "TEXTURE_DESC::hwGamma" - When true, it specifies if the data in the texture is gamma corrected. When performing reads on such texture in a shader the GPU will transform the texture data back to linear space before returning the value. When a texture is used as a render target, the GPU will automatically convert from linear space into gamma space when rendering to the texture. Only relevant for 2D textures.
 - @ref bs::TEXTURE_DESC::numSamples "TEXTURE_DESC::numSamples" - Specifies the number of samples per pixel. This is used primarily for multi-sample antialiasing. This is only relevant for 2D textures, and only for textures used as render targets. You cannot read or write from/to multi-sample textures manually.
 - @ref bs::TEXTURE_DESC::usage "TEXTURE_DESC::usage" - Flags that control how is the texture allowed to be used, represented by the @ref bs::TextureUsage "TextureUsage" enum
 
@@ -46,6 +46,8 @@ desc.format = PF_R8G8B8A8;
 HTexture texture = Texture::create(desc);
 ~~~~~~~~~~~~~
 
+> Low level rendering API is explained as a part of the developer manuals.
+
 # Writing data
 Once a texture has been created you might want to write some data to it. This is accomplished by calling @ref bs::Texture::writeData "Texture::writeData()". The method accepts a @ref bs::PixelData "PixelData" object, as well as a mip-map level and a face to write to.
 
@@ -63,7 +65,7 @@ Once created you can set the color of each pixel by calling @ref bs::PixelData::
 Vector<Color> colors;
 for(UINT32 y = 0; y < 128; y++)
 	for(UINT32 x = 0; x < 128; x++)
-		colors.push_back(Color(x * 2.0f, y * 2.0f, 0.0f, 1.0f));
+		colors.push_back(Color(x / 128.0f, y / 128.0f, 0.0f, 1.0f));
 
 pixelData->setColors(colors);		
 ~~~~~~~~~~~~~
@@ -131,7 +133,7 @@ SPtr<PixelData> pixelData = texProps.allocBuffer(0, 2);
 When you are sure you will overwrite all the contents of a texture, make sure to set the last parameter of **Texture::writeData()** to true. This ensures the system can more optimally execute the transfer, without requiring the GPU to finish its current action (which can be considerably slow if it is currently using that particular texture).
 
 ## Generating mip-maps
-Mip-maps are generally created automatically from a source texture, rather than by manually setting their pixels. Therefore Banshee provides @ref bs::PixelUtil::genMipmaps "PixelUtil::genMipmaps()" method that accepts a **PixelData** object containing pixels to generate mip levels from. A maximum number of mip-maps levels is then generated and output. You can optionally customize mip-map generating by providing a @ref bs::MipMapGenOptions "MipMapGenOptions" object.
+Mip-maps are generally created automatically from a source texture, rather than by manually setting their pixels. Therefore bs::f provides @ref bs::PixelUtil::genMipmaps "PixelUtil::genMipmaps()" method that accepts a **PixelData** object containing pixels to generate mip levels from. A maximum number of mip-maps levels is then generated and output. You can optionally customize mip-map generation by providing a @ref bs::MipMapGenOptions "MipMapGenOptions" object.
 
 ~~~~~~~~~~~~~{.cpp}
 SPtr<PixelData> pixelData = "...";

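// Illustrative sketch of the call described above (default option values assumed):
// generate the full mip chain from the source pixel data.
MipMapGenOptions mipOptions;
Vector<SPtr<PixelData>> mipmaps = PixelUtil::genMipmaps(*pixelData, mipOptions);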
+ 1 - 1
Documentation/Manuals/Native/User/animation.md

@@ -87,7 +87,7 @@ for(UINT32 i = 0; i < numChannels; i++)
 # Mesh culling
 Normally, when objects are about to be rendered, the system tries to detect if an object is in view of the camera or not. This ensures that the GPU doesn't waste time on objects that are certain not to be visible. For the purposes of culling the system uses an approximation of the mesh to be rendered, in the form of its bounding box. This is efficient because that bounding box can be pre-calculated, stored and then easily checked for visibility.
 
-But with animation the object is constantly transforming, and it is not efficient to re-calculate the bounding box with each new frame of the animation. The system then instead uses the same bounding box it would have used if the object is static. This means that if the animated object's mesh ever leaves the bounds of that box, the system could decide to cull the object, even though some part of it is still visible. This can result in noticeable graphical artifacts, but Banshee provides a way to fix the issue by providing your own bounds.
+But with animation the object is constantly transforming, and it is not efficient to re-calculate the bounding box with each new frame of the animation. The system then instead uses the same bounding box it would have used if the object is static. This means that if the animated object's mesh ever leaves the bounds of that box, the system could decide to cull the object, even though some part of it is still visible. This can result in noticeable graphical artifacts, but bs::f provides a way to fix the issue by providing your own bounds.
 
 By providing your own bounding box, you can ensure the box is large enough to cover the entire range of motion of the animation, so that mesh at no point can leave it. This ensures no incorrect culling will happen. To do this you must first enable custom bounds by calling @ref bs::CAnimation::setUseBounds "CAnimation::setUseBounds()", followed by setting the actual bounds though @ref bs::CAnimation::setBounds "CAnimation::setBounds()".
 

+ 5 - 5
Documentation/Manuals/Native/User/animationClip.md

@@ -1,7 +1,7 @@
 Loading animation clips						{#animationClip}
 ===============
 
-When it comes to animating 3D objects (meshes), Banshee supports two types of animation:
+When it comes to animating 3D objects (meshes), bs::f supports two types of animation:
 - Skeletal - Each vertex on the mesh is assigned to a bone using and index and a weight - this is called a skin. Each bone is part of a hierarchy which is called a skeleton. The bones are then animated using animation clips, and as the bones move, so do the vertices (skin) attached to them. 
 - Morph - Many different versions of the same mesh exist, representing different shapes of the mesh. By interpolating between the shapes animation is produced. This interpolation is also controlled by animation clips.
 
@@ -13,7 +13,7 @@ In general it is preferred to use skeletal animation wherever possible, as it ha
 
 Both animation types are controlled via @ref bs::AnimationClip "AnimationClip"%s. 
 
-Animation clip consists of a set of animation curves, each animating either a bone or a set of morph shapes. Each animation curve has a set of keyframes (time and position pairs). As animation is playing, time moves forward and the system interpolates between those keyframes and applies the latest available value, which produces the animation. Knowledge of this internal structure is not necessary to play animation, but it will be useful to know when we explain some more advanced features below.
+Animation clip consists of a set of animation curves, each animating either a bone or a set of morph shapes. Each animation curve has a set of keyframes (time and value pairs). As animation is playing, time moves forward and the system interpolates between those keyframes and applies the latest available value, which produces the animation. Knowledge of this internal structure is not necessary to play animation, but it will be useful to know when we explain some more advanced features below.
 
 # Import
 Animation clips are imported from the same source file that contains the mesh object. The import process is similar to how we imported physics meshes, using **Importer::importAll()** to retrieve the clips.
@@ -46,7 +46,7 @@ These aren't properties you need to access manually for normal animation playbac
 
 # Advanced
 ## Splitting
-Often the creator of the animation will place several animations into the same set of animation curves, one playing after another. When imported in Banshee this will result in a single continous animation clip. This is not useful and in such cases you can break up the animation into multiple clips by populating a set of @ref bs::AnimationSplitInfo "AnimationSplitInfo" structures, and providing them to @ref bs::MeshImportOptions::setAnimationClipSplits "MeshImportOptions::setAnimationClipSplits()".
+Often the creator of the animation will place several animations into the same set of animation curves, one playing after another. When imported in bs::f this will result in a single continous animation clip. This is not useful and in such cases you can break up the animation into multiple clips by populating a set of @ref bs::AnimationSplitInfo "AnimationSplitInfo" structures, and providing them to @ref bs::MeshImportOptions::setAnimationClipSplits "MeshImportOptions::setAnimationClipSplits()".
 
 Each of **AnimationSplitInfo** entries requires the starting and ending frame of the animation, as well as a name to make it easier to identify. Starting/ending frames are something you must receive from the animation creator, or guess from animation playback.
 
@@ -80,9 +80,9 @@ for(auto& entry : resource)
 ~~~~~~~~~~~~~
 
 ## Keyframe reduction
-Tools that create animation will often output a large set of animation keyframes, usually at a fixed rate (e.g. 60 per second). In most cases this amount of keyframes is not necessary as many of them are static and change very slowly. Additionally Banshee uses keyframe tangents to better approximate the animation curve, ensuring less keyframes need to be used. 
+Tools that create animation will often output a large set of animation keyframes, usually at a fixed rate (e.g. 60 per second). In most cases this amount of keyframes is not necessary as many of them are static and change very slowly. Additionally bs::f uses keyframe tangents to better approximate the animation curve, ensuring less keyframes need to be used. 
 
-By enabling @ref bs::MeshImportOptions::setKeyFrameReduction "MeshImportOptions::setKeyFrameReduction()" you can ensure that Banshee eliminates any keyframes it deems unnecessary. This can greately reduce the memory usage of animation clips, but might yield animation clips that don't look exactly as imagined by the creator. In most cases you should enable this unless you notice problems.
+By enabling @ref bs::MeshImportOptions::setKeyFrameReduction "MeshImportOptions::setKeyFrameReduction()" you can ensure that bs::f eliminates any keyframes it deems unnecessary. This can greately reduce the memory usage of animation clips, but might yield animation clips that don't look exactly as imagined by the creator. In most cases you should enable this unless you notice problems.
 
 ~~~~~~~~~~~~~{.cpp}
 importOptions->setKeyFrameReduction(true);

+ 13 - 1
Documentation/Manuals/Native/User/cameras.md

@@ -94,4 +94,16 @@ SPtr<RenderWindow> primaryWindow = gApplication().getPrimaryWindow();
 auto& windowProps = newWindow->getProperties();
 
 camera->setOrthoWindow(windowProps.getWidth(), windowProps.getHeight());
-~~~~~~~~~~~~~
+~~~~~~~~~~~~~
+
+# Multi-sample anti-aliasing
+To achieve higher rendering quality you may enable MSAA per camera. This will ensure that each rendered pixel receives multiple samples which are then averaged to produce the final pixel color. This process reduces aliasing on pixels that have discontinuities, like pixels that are on a boundary between two surfaces. This reduces what are often called "jaggies". 
+
+MSAA can be enabled by providing a value of 1, 2, 4 or 8 to @ref bs::CCamera::setMSAACount() "CCamera::setMSAACount()". The value determines the number of samples per pixel, where 1 means no MSAA. MSAA can be quite performance heavy, and larger MSAA values require proportionally more performance. 
+
+~~~~~~~~~~~~~{.cpp}
+// Enable 4X MSAA
+camera->setMSAACount(4);
+~~~~~~~~~~~~~
+
+@ref TODO_IMAGE

+ 1 - 1
Documentation/Manuals/Native/User/colliders.md

@@ -14,7 +14,7 @@ There are five collider types, that differ in the way how is their surface descr
  - Box - The surface is a box with custom width/height/depth
 - Box - The surface is a box with custom width/height/depth
 - Sphere - The surface is a sphere with a radius
 - Capsule - The surface is a capsule with a radius and a height
- - Mesh - The surface is represented by custom geometry using with a triangle mesh
+ - Mesh - The surface is represented by custom geometry using a triangle mesh
  
 # Plane collider
 Represented by @ref bs::CPlaneCollider "PlaneCollider" component. Use @ref bs::CPlaneCollider::setNormal "CPlaneCollider::setNormal()" to provide the direction in which the plane is oriented in, and @ref bs::CPlaneCollider::setDistance "CPlaneCollider::setDistance()" to provide an offset along that direction. Using these two properties you can position and orient a plane anywhere in the scene.
+ 11 - 11
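A minimal sketch of the plane collider setup described above; the scene object name and values are assumed for illustration:

~~~~~~~~~~~~~{.cpp}
// A ground plane facing up, with no offset along its normal
HSceneObject groundSO = SceneObject::create("Ground");
HPlaneCollider groundPlane = groundSO->addComponent<CPlaneCollider>();
groundPlane->setNormal(Vector3(0.0f, 1.0f, 0.0f));
groundPlane->setDistance(0.0f);
~~~~~~~~~~~~~
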
Documentation/Manuals/Native/User/creatingMeshes.md

@@ -1,10 +1,10 @@
-Creating meshes				{#creatingMeshes}
+Advanced meshes				{#creatingMeshes}
 ===============
 
-In a previous chapter we have shown how to import meshes from external files, and in this chapter we'll learn how to create meshes manually. 
+In this chapter we'll learn how to create meshes manually and populate them with data. 
 
 # Creating a mesh
-To create a mesh call @ref bs::Mesh::create "Mesh::create" or one if its overloads. You'll need to populate the @ref bs::MESH_DESC "MESH_DESC" structure and pass it as a parameter. At minimum the structure requires you to provide:
+To create a mesh call @ref bs::Mesh::create "Mesh::create()" or one if its overloads. You'll need to populate the @ref bs::MESH_DESC "MESH_DESC" structure and pass it as a parameter. At minimum the structure requires you to provide:
 - @ref bs::MESH_DESC::numVertices "MESH_DESC::numVertices" - Number of vertices in the mesh
 - @ref bs::MESH_DESC::numIndices "MESH_DESC::numIndices" - Number of indices in the mesh
 - @ref bs::MESH_DESC::vertexDesc "MESH_DESC::vertexDesc" - Structure of type @ref bs::VertexDataDesc "VertexDataDesc" that describes what kind of data does each individual vertex contains, which we'll discuss in detail later.
@@ -51,7 +51,7 @@ You may also specify these optional properties, primarily useful for low-level r
 Once the **VertexDataDesc** structure has been filled, you can use it for initializing a **Mesh** as shown above.
 
 # Writing mesh data
-After mesh has been created you need to write some vertex and index data to it by calling @ref bs::Mesh::writeData "Mesh::writeData". This method accepts a @ref bs::MeshData "MeshData" object.
+After mesh has been created you need to write some vertex and index data to it by calling @ref bs::Mesh::writeData "Mesh::writeData()". This method accepts a @ref bs::MeshData "MeshData" object.
 
 ~~~~~~~~~~~~~{.cpp}
 SPtr<MeshData> meshData = ...; // Explained below
@@ -61,7 +61,7 @@ mesh->writeData(meshData);
 ~~~~~~~~~~~~~ 
 
 ## Creating mesh data
-You can create @ref bs::MeshData "MeshData" by calling @ref bs::MeshData::create(UINT32, UINT32, const SPtr<VertexDataDesc>&, IndexType) "MeshData::create" and providing it with vertex description, index type and number of vertices and indices. You must ensure that the formats and sizes match the mesh this will be used on.
+You can create @ref bs::MeshData "MeshData" by calling @ref bs::MeshData::create(UINT32, UINT32, const SPtr<VertexDataDesc>&, IndexType) "MeshData::create()" and providing it with vertex description, index type and number of vertices and indices. You must ensure that the formats and sizes match the mesh this will be used on.
 
 ~~~~~~~~~~~~~{.cpp}
 // Create mesh data able to contain 8 vertices of the format specified by vertexDesc, and 36 indices
@@ -77,7 +77,7 @@ SPtr<MeshData> vertexDesc = mesh->allocBuffer();
 ## Populating mesh data
 Once **MeshData** has been created you need to populate it with vertices and indices. This can be done in a few ways.
 
-The most basic way is setting the data by using @ref bs::MeshData::setVertexData "MeshData::setVertexData" which set vertex data for a single vertex element all at once.
+The most basic way is setting the data by using @ref bs::MeshData::setVertexData "MeshData::setVertexData()" which set vertex data for a single vertex element all at once.
 
 ~~~~~~~~~~~~~{.cpp}
 // Fill out the data for the 0th VES_POSITION element
@@ -89,7 +89,7 @@ for(UINT32 i = 0; i < 8; i++)
 meshData->setVertexData(VES_POSITION, myVertexPositions, sizeof(myVertexPositions));
 ~~~~~~~~~~~~~
 
-You can also use @ref bs::MeshData::getElementData "MeshData::getElementData" which will return a pointer to the starting point of the vertex data for a specific element. You can then iterate over the pointer to read/write values. Make sure to use @ref bs::VertexDataDesc::getVertexStride "VertexDataDesc::getVertexStride" to know how many bytes to advance between elements. This ensures you don't need to create an intermediate buffer like we did above.
+You can also use @ref bs::MeshData::getElementData "MeshData::getElementData()" which will return a pointer to the starting point of the vertex data for a specific element. You can then iterate over the pointer to read/write values. Make sure to use @ref bs::VertexDataDesc::getVertexStride "VertexDataDesc::getVertexStride()" to know how many bytes to advance between elements. This ensures you don't need to create an intermediate buffer like we did above.
 
 ~~~~~~~~~~~~~{.cpp}
 // Fill out the data for the 0th VES_POSITION element
@@ -105,10 +105,10 @@ for(UINT32 i = 0; i < 8; i++)
 }
 ~~~~~~~~~~~~~
 
-And finally you can use iterators: @ref bs::MeshData::getVec2DataIter "MeshData::getVec2DataIter", @ref bs::MeshData::getVec3DataIter "MeshData::getVec3DataIter", @ref bs::MeshData::getVec4DataIter "MeshData::getVec4DataIter", @ref bs::MeshData::getDWORDDataIter "MeshData::getDWORDDataIter". They are similar to the previous example but you don't need to manually worry about the vertex stride, or going outside of valid bounds.
+And finally you can use iterators: @ref bs::MeshData::getVec2DataIter "MeshData::getVec2DataIter()", @ref bs::MeshData::getVec3DataIter "MeshData::getVec3DataIter()", @ref bs::MeshData::getVec4DataIter "MeshData::getVec4DataIter()", @ref bs::MeshData::getDWORDDataIter "MeshData::getDWORDDataIter()". They are similar to the previous example but you don't need to manually worry about the vertex stride, or going outside of valid bounds.
 
 ~~~~~~~~~~~~~{.cpp}
-// Fill out the data for the 0th VES_POSITION element
+// Fill out the data for the VES_POSITION element
 auto iter = meshData->getVec3DataIter(VES_POSITION);
 
 Vector3 myPosition(0, 0, 0)
@@ -118,7 +118,7 @@ do {
 } while(vecIter.addValue(myPosition)); // // Automatically advances the iterator, and returns false when there's no more room
 ~~~~~~~~~~~~~
 
-Writing indices is simpler and is done through @ref bs::MeshData::getIndices32 "MeshData::getIndices32" or @ref bs::MeshData::getIndices16 "MeshData::getIndices16" depending if the indices are 32 or 16 bit. The returned value is a pointer to the index buffer you can use to read/write the indices directly.
+Writing indices is simpler and is done through @ref bs::MeshData::getIndices32 "MeshData::getIndices32()" or @ref bs::MeshData::getIndices16 "MeshData::getIndices16()" depending if the indices are 32 or 16 bit. The returned value is a pointer to the index buffer you can use to read/write the indices directly.
 
 ~~~~~~~~~~~~~{.cpp}
 // Write 6 32-bit indices
@@ -148,7 +148,7 @@ mesh->readCachedData(*meshData);
 After reading the data you can access it through @ref bs::MeshData::getVertexData "PixelData::getVertexData()", @ref bs::MeshData::getElementData "PixelData::getElementData()" or through iterators.
 
 ~~~~~~~~~~~~~{.cpp}
-// Read the data for the 0th VES_POSITION element, using iterators
+// Read the data for the VES_POSITION element, using iterators
 auto iter = meshData->getVec3DataIter(VES_POSITION);
 
 UINT32 numVertices = meshData->getNumVertices();

+ 8 - 6
Documentation/Manuals/Native/User/customComponents.md

@@ -1,4 +1,4 @@
-Creating custom components						{#customComponents}
+Creating components						{#customComponents}
 ===============
 
 So far we have talked about using built-in components like @ref bs::CCamera "Camera" and @ref bs::CRenderable "Renderable", but another major way you'll be using components is to create your own. Components serve as the main place to put your gameplay logic in, and this is where you'll be adding a majority of your custom code when creating a game.
@@ -84,6 +84,8 @@
 
 > Use @ref bs::Component::SO() "Component::SO()" to access the scene object the component is attached to.
 		
+> **gTime()** method provides access to a variety of timing related functionality, and is explained later in the [timing manual](@ref time).
+		
 # Component handle
 You will also likely want to declare a handle you can use to easily access the component, same as **HCamera** or **HRenderable**. This is done by simply creating a *typedef* on the @ref bs::GameObjectHandle<T> "GameObjectHandle<T>" object.
 
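A minimal sketch of how **gTime()** might be used inside a component's update method; the movement logic is purely illustrative:

~~~~~~~~~~~~~{.cpp}
void CCameraFlyer::update()
{
	// Seconds elapsed since the last frame, for frame-rate independent movement
	float delta = gTime().getFrameDelta();

	SO()->move(Vector3(0.0f, 0.0f, -1.0f) * delta);
}
~~~~~~~~~~~~~
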
@@ -105,10 +107,10 @@ HCamera camera = cameraSO->addComponent<CCamera>(primaryWindow);
 HCameraFlyer = cameraSO->addComponent<CCameraFlyer>();
 ~~~~~~~~~~~~~
 
-# Advanced
-Now that we have the basics covered, there are a few more things to know that can be useful when building more advanced components.
+# Data
+Often a component will contain some data which you want to persist after the scene is saved, which requires you to implement a special interface on your custom component class. We'll talk more about this in the next chapter.
 
-## Activating/deactivating a scene object
+# Activating/deactivating a scene object
 Any scene object can be temporarily de-activated and reactivated by calling @ref bs::SceneObject::setActive "SceneObject::setActive()". When a scene object is deactivated its components will not have **Component::update()** called.
 
 Your component can also be notified at the exact moment when activation/deactivation happens. This way you can perform additional functionality if needed. Override @ref bs::Component::onEnabled "Component::onEnabled" and @ref bs::Component::onDisabled "Component::onDisabled" to get notified every time a component is activated and deactivated, respectively.
@@ -133,7 +135,7 @@ class CCameraFlyer : public Component
 };
 ~~~~~~~~~~~~~
 		
-## Getting notified on scene object change
+# Getting notified on scene object change
 Sometimes you want to get notified when the scene object the component is attached to moves or changes parents. You can do this by overriding the @ref bs::Component::onTransformChanged "Component::onTransformChanged()" method.
 
 ~~~~~~~~~~~~~{.cpp}
@@ -157,7 +159,7 @@
 
 @ref bs::TransformChangedFlags "TransformChangedFlags" parameter will notify you whether the scene object moved, has changed parents, or both.
 
-Note that **Component::onTransformChanged** will never trigger by default. You must first enable it by calling @ref bs::Component::setNotifyFlags "Component::setNotifyFlags". It accepts the same **TransformChangedFlags** parameter which tells the system in which cases should it trigger **Component::onTransformChanged**.
+Note that **Component::onTransformChanged** will never trigger by default. You must first enable it by calling @ref bs::Component::setNotifyFlags "Component::setNotifyFlags()". It accepts the same **TransformChangedFlags** parameter which tells the system in which cases should it trigger **Component::onTransformChanged**.
 
 ~~~~~~~~~~~~~{.cpp}
 // We're just extending the component we defined above

+ 1 - 1
Documentation/Manuals/Native/User/fileSystem.md

@@ -2,7 +2,7 @@ File system									{#fileSystem}
 ===============
 
 # Paths
-Instead of using strings for representing paths, Banshee uses the @ref bs::Path "Path" class. Aside from containing the path it provides a variety of other useful information and allows for path manipulation. It is recommended to always store paths using **Path** instead of strings.
+Instead of using strings for representing paths, bs::f uses the @ref bs::Path "Path" class. Aside from containing the path it provides a variety of other useful information and allows for path manipulation. It is recommended to always store paths using **Path** instead of strings.
 
 ~~~~~~~~~~~~~{.cpp}
 Path myPath = "C:/Path/To/File.txt";

+ 4 - 4
Documentation/Manuals/Native/User/guiElements.md

@@ -1,7 +1,7 @@
 GUI elements									{#guiElements}
 ===============
 
-A GUI element is a basic primitive GUI is constructed out of. They can be text, buttons, input boxes, images, scroll areas and more. We'll explain what the individual GUI element types are later, but initially we'll focus on functionality common to all GUI elements.
+A GUI element is a basic primitive that GUI is constructed out of. They can be text, buttons, input boxes, images, scroll areas and more. We'll explain what the individual GUI element types are later, but initially we'll focus on functionality common to all GUI elements.
 
 # Displaying a GUI element
 In order to display a GUI element we must first create it. All GUI elements are created using a static *create* method.
@@ -86,7 +86,7 @@ label->setVisible(true);
 ~~~~~~~~~~~~~
 
 # GUI element types
-Banshee provides a large library of existing GUI element types. We'll focus on explaining the most important ones, but you can find an exhaustive list in @ref GUI.
+bs::f provides a large library of existing GUI element types. We'll focus on explaining the most important ones, but you can find an exhaustive list in @ref GUI.
 
 ## Label
 A label is the most basic of GUI elements, that allows no user interaction and just displays a textual string. It is created with @ref bs::GUILabel::create "GUILabel::create()", which accepts a string as input.
@@ -132,7 +132,7 @@ mainPanel->addElement(guiTexture);
 ![Texture](guiTexture.png) 
 
 ## Button
-A button GUI element displays a textural string or an image and reports events about user interaction with the button.
+A button GUI element displays a textual string or an image and reports events about user interaction with the button.
 
 GUI elements that can have either text or image contents (or both) accept a @ref bs::GUIContent "GUIContent" structure in their *create* and *setContent* functions. It is just a container and constructed simply:
 
@@ -346,7 +346,7 @@ Once created you can retrieve the current position of the slider by calling @ref
 float curSliderPosition = sliderHorz->getPercent();
 ~~~~~~~~~~~~~
 
-You can also get notified immediately when whe slider handle moves by subscribing to the @ref bs::GUISlider::onChanged "GUISlider::onChanged" event.
+You can also get notified immediately when the slider handle moves by subscribing to the @ref bs::GUISlider::onChanged "GUISlider::onChanged" event.
 
 ~~~~~~~~~~~~~{.cpp}
 auto sliderPositionChanged = [](float percent)

+ 3 - 3
Documentation/Manuals/Native/User/guiLayouts.md

@@ -5,7 +5,7 @@ In previous chapter we talked about how **GUIPanel** is a special type of a GUI
 
 There are three types of layouts:
 - @ref bs::GUIPanel "GUIPanel" - Does no automatic positioning and sizing of child GUI elements, instead user can set positions and sizes manually, as we have already seen. Each panel can be placed at a different depth, allowing GUI elements to overlay each other.
- - @ref bs::GUILayoutX "GUILayoutX" - Automatically positions and sizes child GUI elements horizontally one next to each other, left to right. User is able to request minimum/maximum allowed size, but is unable to manually position the element.
+ - @ref bs::GUILayoutX "GUILayoutX" - Automatically positions and sizes child GUI elements horizontally one next to each other, left to right. User is able to request minimum/maximum allowed size, but is unable to manually position elements in the layout.
 - @ref bs::GUILayoutY "GUILayoutY" - Same as **GUILayoutX** only elements are positioned vertically, top to bottom.
 
 You will find that vertical and horizontal layouts come in handy when you need to design GUI that needs to scale across various screen sizes/resolutions. By adding your GUI elements to such layouts instead of manually positioning them, ensures the GUI system can always keep them at optimal position and size, regardless of the available screen area.
@@ -92,10 +92,10 @@ for(int i = 0; i < 5; i++)
 ![Vertical layout](layoutVertical.png) 
 
 # Customizing automatic layouts
-Even though vertical & horizontal layouts are automatic, Banshee provides a variety of mechanisms that allow you to customize the position and size of GUI elements in such layouts.
+Even though vertical & horizontal layouts are automatic, bs::f provides a variety of mechanisms that allow you to customize the position and size of GUI elements in such layouts.
 
 ## Flexible size
-Each GUI element can have a flexible size that determines its minimum & maximum allowed width/height. This is in contrast to the fixed size we were setting in the example above.
+Each GUI element can have a flexible size that determines its minimum & maximum allowed width/height. This is in contrast to the fixed size we were setting in the previous examples.
 
 When a flexible size is set a GUI layout is allowed to resize the element to best fit the layout area, within the provided size range. Flexible size can be set by calling @ref bs::GUIElement::setFlexibleWidth "GUIElement::setFlexibleWidth()" and @ref bs::GUIElement::setFlexibleHeight "GUIElement::setFlexibleHeight()".
 

+ 23 - 26
Documentation/Manuals/Native/User/guiSetup.md

@@ -1,7 +1,7 @@
 GUI setup									{#guiSetup}
 ===============
 
 
-All GUI elements in Banshee are handled by a @ref bs::CGUIWidget "GUIWidget" component. Each such component must have an attached **Camera** component, which determines where will the rendered GUI elements be output. 
+All GUI elements in bs::f are handled by a @ref bs::CGUIWidget "GUIWidget" component. Each such component must have an attached **Camera** component, which determines where the rendered GUI elements will be output. 
 
 
 The camera is created in the same way as we have shown before, and you can in fact use the same camera you use for normal scene rendering. GUI elements will not be affected by the camera's position, orientation or projection properties - they might however be affected by the size of the camera's render target.
 
 
@@ -36,10 +36,22 @@ mainPanel->addElement(GUILabel::create(HString(L"Hello!")));
 
 
 Don't worry about what **GUIPanel** or **GUILabel** mean at this time; we'll talk about GUI panels, elements and layouts in later chapters. 
 
 
+# Transforming GUI
+
+Once you have set up a **GUIWidget** component, you can transform it using its scene object as normal. This allows you to apply 3D transformations to GUI elements, which can be useful for various interesting effects, including rendering GUI to in-game surfaces (like the screen of an in-game 3D monitor).
+
+~~~~~~~~~~~~~{.cpp}
+// Rotate 30 degrees around the Z axis
+Quaternion rotate(Vector3::UNIT_Z, Degree(30.0f));
+guiSO->setRotation(rotate);
+~~~~~~~~~~~~~
+
+@ref TODO_IMAGE
+
 # Using a separate GUI camera
 In the example above we have assumed you will use the same camera for GUI as you use for scene rendering. However, sometimes it is useful to have a separate camera for GUI, or even multiple separate cameras. In that case camera creation is mostly the same, but with some additional options that need to be enabled. 
 
 
-Lets see how to create a camera that can be used for GUI rendering along with scene rendering. Initial creation of the camera is identical, we just choose a render target:
+Initial creation of the camera is identical; we just choose a render target:
 
 
 ~~~~~~~~~~~~~{.cpp}
 SPtr<RenderWindow> primaryWindow = gApplication().getPrimaryWindow();
@@ -48,42 +60,27 @@ HSceneObject guiCameraSO = SceneObject::create("GUI camera");
 HCamera guiCamera = guiCameraSO->addComponent<CCamera>(primaryWindow);
 ~~~~~~~~~~~~~
 
 
-In order to prevent the camera from rendering scene objects, we enable the @ref bs::CameraFlag::Overlay "Overlay" option.
+In order to prevent the camera from rendering scene objects, we enable the **RenderSettings::overlayOnly** property on the camera's **RenderSettings** object.
 ~~~~~~~~~~~~~{.cpp}
-guiCamera->setFlag(CameraFlag::Overlay, true);
+auto rs = guiCamera->getRenderSettings();
+rs->overlayOnly = true;
+
+guiCamera->setRenderSettings(rs);
 ~~~~~~~~~~~~~
 
 
 Now our camera will render just overlay objects (GUI and sprites), and nothing else. 
 
 
-Next, we need to make sure the camera renders after the main scene camera - if we don't then the scene rendering might render on top of the GUI. Using the @ref bs::CCamera::setPriority "CCamera::setPriority()" method we can control the order in which cameras sharing the same render target are rendered. By default all cameras have a priority of 0, and since we want our GUI camera to render after the scene camera, we set its priority to -1.
+Next, we want to prevent the camera from clearing the render target. By default cameras will set all the pixels in the render target to some default value before they start rendering, every frame. We want our GUI camera to just render on top of anything rendered by the scene camera, so we disable that functionality by retrieving a @ref bs::Viewport "Viewport" from the camera. 
 
 
-~~~~~~~~~~~~~{.cpp}
-guiCamera->setPriority(-1);
-~~~~~~~~~~~~~
-
-And finally, we want to prevent the camera from clearing the render target. By default cameras will set all the pixels in the render target to some default value before they start rendering, every frame. We want our GUI camera to just render on top of anything rendered by the scene camera, so we disable that functionality by retrieving a @ref bs::Viewport "Viewport" from the camera. 
-
-**Viewport** is retrieved by calling @ref bs::CCamera::getViewport "CCamera::getViewport()". It allows you to set how and if the render target is cleared through @ref bs::Viewport::setRequiresClear "Viewport::setRequiresClear()" and @ref bs::Viewport::setClearValues "Viewport::setClearValues()". Clear options can be set separately for color, depth and stencil buffers.
+**Viewport** is retrieved by calling @ref bs::CCamera::getViewport "CCamera::getViewport()". It allows you to set whether the render target is cleared through @ref bs::Viewport::setClearFlags "Viewport::setClearFlags()", by providing the @ref bs::ClearFlagBits::Empty "ClearFlagBits::Empty" flag. 
 
 
 ~~~~~~~~~~~~~{.cpp}
 SPtr<Viewport> viewport = guiCamera->getViewport();
 
 
 // Disable clear for color, depth and stencil buffers
-viewport->setRequiresClear(false, false, false);
+viewport->setClearFlags(ClearFlagBits::Empty);
 ~~~~~~~~~~~~~
 
 
-> You can also use the viewport to control onto which portion of the render target should the camera render to. By default it will output to the entire render target but you can change the area by calling @ref bs::Viewport::setArea "Viewport::setArea()".
+You can also use the viewport to control which portion of the render target the camera renders to. By default it will output to the entire render target, but you can change the area by calling @ref bs::Viewport::setArea "Viewport::setArea()".
 
 
 At this point you can use the camera to create a **GUIWidget** and use the GUI as normal.
-
-# Transforming GUI
-
-Once you have set up a **GUIWidget** component, you can transform it using its scene object as normal. This allows you to apply 3D transformations to GUI elements, which can be useful for various interesting effects, including rendering GUI to in-game surfaces (like on a screen of an in-game 3D monitor).
-
-~~~~~~~~~~~~~{.cpp}
-// Rotate 30 degrees around the Z axis
-Quaternion rotate(Vector3::UNIT_Z, Degree(30.0f));
-guiSO->setRotation(rotate);
-~~~~~~~~~~~~~
-
-@ref TODO_IMAGE

+ 3 - 3
Documentation/Manuals/Native/User/guiStyles.md

@@ -119,7 +119,7 @@ Dimensions are controlled by the following properties:
  - @ref bs::GUIElementStyle::maxHeight "GUIElementStyle::maxHeight"
  - @ref bs::GUIElementStyle::fixedHeight "GUIElementStyle::fixedHeight"
    
    
-When **GUIElementStyle::fixedWidth** or **GUIElementStyle::fixedHeight** is set to true, the system will use size values provided by **GUIElementStyle::width** and **GUIElementStyle::height** respectively. This is the same as calling **GUIElement::setSize**.
+When **GUIElementStyle::fixedWidth** or **GUIElementStyle::fixedHeight** is set to true, the system will use size values provided by **GUIElementStyle::width** and **GUIElementStyle::height** respectively. This is the same as calling **GUIElement::setSize()**.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Style that uses a fixed 50x20 size (e.g. a fixed size button)
@@ -131,7 +131,7 @@ style.height = 50;
 style.width = 20;
 ~~~~~~~~~~~~~
 		
 		
-When **GUIElementStyle::fixedWidth** or **GUIElementStyle::fixedHeight** are false, the system will instead use the min/max values provided by **GUIElementStyle::minWidth** / **GUIElementStyle::maxWidth** and **GUIElementStyle::minHeight** / **GUIElementStyle::maxHeight**, respectively. This is the same as calling **GUIElement::setFlexibleWidth** or **GUIElement::setFlexibleHeight**.
+When **GUIElementStyle::fixedWidth** or **GUIElementStyle::fixedHeight** are false, the system will instead use the min/max values provided by **GUIElementStyle::minWidth** / **GUIElementStyle::maxWidth** and **GUIElementStyle::minHeight** / **GUIElementStyle::maxHeight**, respectively. This is the same as calling **GUIElement::setFlexibleWidth()** or **GUIElement::setFlexibleHeight()**.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Style that uses a fixed 20px height and flexible width with minimum 30 pixels, and no limit on maximum size (e.g. a button that expands horizontally to fit its contents)
@@ -191,7 +191,7 @@ style.margins.bottom = 0;
 ### Borders
 Normally when you set a texture for a GUI element state, the texture will stretch to fill out the area of the GUI element. Unless the texture is a uniform color or a repeatable pattern, that stretching will often look bad.
 
 
-For this reason Banshee allows you to specify borders though @ref bs::GUIElementStyle::border "GUIElementStyle::border". By setting borders you split the image into 9 sections.
+For this reason bs::f allows you to specify borders through @ref bs::GUIElementStyle::border "GUIElementStyle::border". By setting borders you split the image into 9 sections.
 
 
 Center section will be resized uniformly as normal. Four corner sections will never be resized. Top and bottom sections will be resized only horizontally (fixed height), and left and right sections will be resized only vertically (fixed width). This allows you to use more complex images that still look good when resized.
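As a minimal sketch (the 4-pixel value is arbitrary), a style could reserve equal borders on all four sides so that only the center section of the texture stretches when the element is resized:

~~~~~~~~~~~~~{.cpp}
GUIElementStyle style;

// Keep a 4 pixel border on every side of the texture
style.border.left = 4;
style.border.right = 4;
style.border.top = 4;
style.border.bottom = 4;
~~~~~~~~~~~~~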
 
 

+ 0 - 78
Documentation/Manuals/Native/User/imageBasedLighting.md

@@ -1,78 +0,0 @@
-Image based lighting						{#imageBasedLighting}
-===============
-
-Image based lighting is a form of lighting that uses cubemap textures to provide an approximation of the environment in order to provide more realistic lighting. This form of lighting is primarily important when rendering glossy reflections on metal surfaces that use physically based materials. Without some form of image based lighting set up in your scene those reflections will not reflect anything, and won't look proper. 
-
-It is of lesser importance for diffuse (non-specular) lighting, but can also provide a benefit that provides a cheap global illumination solution, by using irradiance maps to calculate indirect lighting.
-
-There are two main types of image based lights:
- - Skybox - Represented by the @ref bs::CSkybox "Skybox" component. 
- - Reflection probe - Represented by the @ref bs::CReflectionProbe "ReflectionProbe" component. 
- 
-# Skybox
-Skyboxes use a user-provided HDR cubemap texture in order to display an image of the sky when the camera is looking at the scene when no other object is ocluding the sky. The same image is also used to provide both specular reflections and indirect lighting on objects lit by the sky. By default all objects not in radius of a reflection probe (more about them below) is considered lit by the sky.
-
-Skybox is represented by the **Skybox** component, which requires only a HDR texture of the sky to work.
-
-~~~~~~~~~~~~~{.cpp}
-// Import a sky cubemap from a cylindrical (panoramic) image
-SPtr<ImportOptions> tio = TextureImportOptions::create();
-tio->setIsCubemap(true);
-tio->setCubemapSourceType(CubemapSourceType::Cylindrical);
-tio->setFormat(PF_FLOAT_R11G11B10); // Or the 16-bit floating point format
-
-HTexture skyTexture = gImporter().import<Texture>("MySkybox.hdr", tio);
-
-// Set up the skybox
-HSceneObject skyboxSO = SceneObject::create("Skybox");
-HSkybox skybox = skyboxSO->addComponent<CSkybox>();
-
-skybox->setTexture(skyTexture);
-~~~~~~~~~~~~~
-
-Optionally you might also want to increase or decrease the brightness of the sky by calling @ref bs::CSkybox::setBrightness "CSkybox::setBrightness()". Note that this will not effect the visual appearance of the sky but will only affect the lighting cast by the sky on other surfaces.
-
-## Combining sky and analytical lights
-Lights represented by the **Light** component we talked earlier are called analytical lights. When using an HDR skybox texture recorded outside of the engine you might need to tweak the texture so it works well with such analytical lights. Some cameras might record HDR values in non-physical units in which case sky might appear too bright or too dark, in which case analytical lights might appear over- or underpowered.
-
-Additionally, depending on your project, you might want to cut out the Sun from the recorded HDR image (for example, replace it with a bit of sky), and then use the directional analytical light for the Sun instead.
-
-# Reflection probes
-While the skybox is used to provide outdoor reflections, reflection probes are used to create reflection cubemaps for indoor environments. Reflection probes have an origin and a radius of influence. Reflection probes also use HDR cubemaps, but instead of using external textures those cubemaps are generated in-engine, at the position of the reflection probe. They are represented using the **ReflectionProbe** component.
-
-~~~~~~~~~~~~~{.cpp}
-HSceneObject reflProbeSO = SceneObject::create("Refl. probe");
-HReflectionProbe reflProbe = reflProbeSO->addComponent<CReflectionProbe>();
-~~~~~~~~~~~~~
-
-You must provide the extents of the geometry covered by the reflection probe. These extents serve both to determine a range of influence, and to approximate the surrounding geometry. For example if you are placing a reflection probe that covers a room, you should strive to match the reflection probe extents with the room walls.
-
-You can assign extents in two ways, depending on reflection probe type:
- - @ref bs::ReflectionProbeType::Box "ReflectionProbeType::Box" - Reflection probe is represented by a box and extents are set by calling @ref bs::CReflectionProbe::setExtents "CReflectionProbe::setExtents()".
- - @ref bs::ReflectionProbeType::Sphere "ReflectionProbeType::Sphere" - Reflection probe is represented by a sphere an extents are set by calling @ref bs::CReflectionProbe::setRadius "CReflectionProbe::setRadius()".
- 
-You can change the type of the reflection probe (and therefore extents) by calling @ref bs::CReflectionProbe::setType "CReflectionProbe::setType()".
-
-~~~~~~~~~~~~~{.cpp}
-reflProbe->setType(ReflectionProbeType::Box);
-reflProbe->setExtents(Vector3(2.0f, 2.0f, 2.0f));
-~~~~~~~~~~~~~
-
-## Generating reflection probe
-Reflection probe cubemap will be generated automatically when the reflection probe is first added to the scene, and whenever it is moved. You can also force the cubemap to regenerate by calling @ref bs::CReflectionProbe::generate "CReflectionProbe::generate()". This is required when surrounding geometry changes and you wish to update the probe cubemap.
-
-~~~~~~~~~~~~~{.cpp}
-reflProbe->generate();
-~~~~~~~~~~~~~
-
-## Using external textures
-In case you want to use an external HDR texture, similar to a skybox, you can call @ref bs::CReflectionProbe::setCustomTexture "CReflectionProbe::setCustomTexture()". The system will no longer use the automatically generated cubemap and use the provided one instead. If you wish to switch back to the automatic generator, call the method with a null value.
-
-~~~~~~~~~~~~~{.cpp}
-HTexture myCubemap = ...;
-
-reflProbe->setCustomTexture(myCubemap);
-~~~~~~~~~~~~~
-
-## Reflection probe interpolation
-When multiple reflection probes overlap the system will blend between the reflection probes based on the distance from the origin and the probe extents. If system can't blend with other reflection probes it will instead blend with the sky. This means in most cases you want to ensure that reflection probes overlap, in order to provide clean transitions.

+ 1 - 1
Documentation/Manuals/Native/User/importingAudio.md

@@ -1,7 +1,7 @@
 Importing audio 						{#importingAudio}
 ===============
 
 
-Audio in Banshee is represented in the form of an @ref bs::AudioClip "AudioClip" object. An audio clip is a resource, meaning it can be imported, saved and loaded as we described in the Resource manuals.
+Audio in bs::f is represented in the form of an @ref bs::AudioClip "AudioClip" object. An audio clip is a resource, meaning it can be imported, saved and loaded as we described in the resource manuals.
 
 
 Different audio file formats are supported depending on which audio backend is used:
  - OpenAudio (default)

+ 1 - 1
Documentation/Manuals/Native/User/importingFonts.md

@@ -1,7 +1,7 @@
 Importing fonts 						{#importingFonts}
 ===============
 
 
-Fonts control how text characters look and are used primarily throughout the GUI system. They are represented with the @ref bs::Font "Font" class. A font is a resource, meaning it can be imported, saved and loaded as we described in the Resource manuals.
+Fonts control how text characters look and are used primarily throughout the GUI system. They are represented with the @ref bs::Font "Font" class. A font is a resource, meaning it can be imported, saved and loaded as we described in the resource manual.
 
 
 Fonts can be imported from .TTF or .OTF formats using the importer.
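For example (the file name is just a placeholder), a font is imported like any other resource:

~~~~~~~~~~~~~{.cpp}
// Import a font from a .ttf file using the default import options
HFont font = gImporter().import<Font>("MyFont.ttf");
~~~~~~~~~~~~~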
 
 

+ 54 - 0
Documentation/Manuals/Native/User/indirectLighting.md

@@ -0,0 +1,54 @@
+Indirect lighting				{#indirectLighting}
+===============
+
+When you set up a **Light** component, surfaces lit by that light will only be lit if the surface is directly in the light path. But in real world the light bounces off surfaces, providing lighting to surfaces that are not in a direct path to the light. Indirect lighting provides an additional way to add realism to your scene by accounting for that non-direct lighting.
+
+Indirect lighting needs to be enabled through **RenderSettings::enableIndirectLighting** for the relevant **Camera**, as it is disabled by default.
+
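For example, assuming an existing camera, a minimal sketch of enabling it looks like this:

~~~~~~~~~~~~~{.cpp}
HCamera camera = ...;

// Enable indirect lighting for this camera's view
auto rs = camera->getRenderSettings();
rs->enableIndirectLighting = true;

camera->setRenderSettings(rs);
~~~~~~~~~~~~~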
+# Sky lighting
+If you have set up a **Skybox** and enabled indirect lighting, your scene will immediately receive indirect lighting from the skybox; no additional settings are required.
+
+Note that you might want to tweak the HDR texture used by the skybox so it doesn't overpower normal lighting. Some cameras record HDR values in non-physical units, in which case the sky might appear too bright or too dark, making analytical lights appear over- or underpowered.
+
+# Light probes
+When it comes to indoor areas you must follow a similar approach as with reflection probes. Again, you don't want indoor areas to receive indirect lighting from the sky, and must therefore set up an additional component. This is done through @ref bs::CLightProbeVolume "LightProbeVolume". 
+
+~~~~~~~~~~~~~{.cpp}
+// Set up a light probe volume
+HSceneObject lightProbeVolumeSO = SceneObject::create("LightProbeVolume");
+HLightProbeVolume lightProbeVolume = lightProbeVolumeSO->addComponent<CLightProbeVolume>();
+~~~~~~~~~~~~~
+
+**LightProbeVolume** allows you to set up light probes over the scene. The light probes will record lighting information at their position, and nearby surfaces will then use that information for indirect lighting. If a camera is outside of a light probe volume it will fall back to sky lighting. 
+
+## Placing probes
+You generally want to place light probes wherever there is a major change in lighting. The probes are fairly cheap performance-wise, and dozens can be used in a single room. 
+
+To add a probe to the volume call @ref bs::CLightProbeVolume::addProbe "CLightProbeVolume::addProbe()". The method only requires you to provide a position at which to place the probe. The position is relative to the **SceneObject** the volume is attached to. 
+
+~~~~~~~~~~~~~{.cpp}
+// Register a couple of probes
+lightProbeVolume->addProbe(Vector3(0.0f, 1.0f, 0.0f));
+lightProbeVolume->addProbe(Vector3(5.0f, 1.0f, 0.0f));
+~~~~~~~~~~~~~
+
+## Generic probe positioning
+Note that when first created the volume will contain eight probes placed on the corners of a unit box, at the volume's location. You can call @ref bs::CLightProbeVolume::resize() "CLightProbeVolume::resize()" to change the size of the box for the eight probes. You can also increase the density, in which case probes will also be placed in-between the box corners, as a uniform grid. This is particularly useful if you do not feel like placing probes manually.
+
+~~~~~~~~~~~~~{.cpp}
+// Set up a probe volume using a uniform grid distribution of probes with a total of 50 probes
+AABox area(Vector3(-5, -5, -5), Vector3(5, 5, 5));
+Vector3I probeCount(5, 2, 5);
+
+lightProbeVolume->resize(area, probeCount);
+~~~~~~~~~~~~~
+
+## Rendering probes
+Once you have positioned the probes, you need to render them by calling @ref bs::CLightProbeVolume::renderProbes "CLightProbeVolume::renderProbes()". This will update the lighting information for all probes in a volume. You will want to do this any time you add/remove or move probes, or when the lighting environment changes.
+
+~~~~~~~~~~~~~{.cpp}
+// Update probes based on current scene
+lightProbeVolume->renderProbes();
+~~~~~~~~~~~~~
+
+The rendered probes will be saved with the component, so you do not need to render them again after scene load.

+ 1 - 1
Documentation/Manuals/Native/User/joints.md

@@ -3,7 +3,7 @@ Joints 						{#joints}
 
 
 Joints allow you to constrain movement of two rigidbodies in some way. A typical example would be a door hinge. 
 
 
-Banshee supports six different joint types:
+bs::f supports six different joint types:
  - Fixed - Locks origins and orientations together
  - Distance - Keeps origins within a certain distance range
  - Spherical - Keeps origins together but allows rotation with no restrictions (also known as ball-and-socket joint)

+ 1 - 1
Documentation/Manuals/Native/User/lights.md

@@ -55,7 +55,7 @@ This controls how strong is the light. Although you could technically control li
 
 
 In nature the range of light intensities varies greatly - standing outside on a sunlit day may be hundreds or thousands of times brighter than standing indoors illuminated by a lightbulb. We won't perceive such a large difference because our eyes are able to adjust to different intensities.
 
 
-Banshee uses a HDR algorithm to try to approximate this adjustment, which results in more realistic and higher quality lighting. Therefore it can be important for realism to set up the light intensities similar to what they would be in nature. 
+bs::f uses an HDR algorithm to try to approximate this adjustment, which results in more realistic and higher quality lighting. Therefore it can be important for realism to set up light intensities similar to what they would be in nature. 
 
 
 Use @ref bs::CLight::setIntensity "CLight::setIntensity()" to change the light intensity. 
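A quick sketch (assuming an existing light handle; the value is an arbitrary example):

~~~~~~~~~~~~~{.cpp}
HLight light = ...;

// Make the light considerably stronger (value chosen purely for illustration)
light->setIntensity(800.0f);
~~~~~~~~~~~~~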
 
 

+ 1 - 1
Documentation/Manuals/Native/User/logging.md

@@ -1,7 +1,7 @@
 Logging	messages								{#logging}
 ===============
 
 
-Logging can be a useful way to debug issues during development, or notify the user that an error occurred. In Banshee it is handled though the @ref bs::Debug "Debug" class. Use @ref bs::gDebug "gDebug()" for an easy way to access the **Debug** instance.
+Logging can be a useful way to debug issues during development, or notify the user that an error occurred. In bs::f it is handled through the @ref bs::Debug "Debug" class. Use @ref bs::gDebug "gDebug()" for an easy way to access the **Debug** instance.
 
 
 Use any of these methods to log a new message:
  - @ref bs::Debug::logDebug "Debug::logDebug" - Logs an informative message.
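A quick usage sketch:

~~~~~~~~~~~~~{.cpp}
// Log an informative message
gDebug().logDebug("Engine initialized");
~~~~~~~~~~~~~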

+ 3 - 3
Documentation/Manuals/Native/User/math.md

@@ -1,7 +1,7 @@
 Math					{#mathUtilities}
 ===============
 
 
-General purpose math functionality in Banshee is provided through the @ref bs::Math "Math" class. It provides a variety of familiar methods, such as @ref bs::Math::floor "Math::floor()", @ref bs::Math::clamp "Math::clamp()", @ref bs::Math::cos "Math::cos()" and many others. Check the API reference for a full list.
+General purpose math functionality in bs::f is provided through the @ref bs::Math "Math" class. It provides a variety of familiar methods, such as @ref bs::Math::floor "Math::floor()", @ref bs::Math::clamp "Math::clamp()", @ref bs::Math::cos "Math::cos()" and many others. Check the API reference for a full list.
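A few quick examples:

~~~~~~~~~~~~~{.cpp}
float a = Math::floor(10.7f);            // 10.0
float b = Math::clamp(1.5f, 0.0f, 1.0f); // 1.0 (value clamped to the [0, 1] range)
float c = Math::cos(Math::PI);           // -1.0
~~~~~~~~~~~~~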
 
 
 All other math functionality is provided through specific types, as listed below.
 
 
@@ -57,7 +57,7 @@ printAngle(myAngle2);
 ~~~~~~~~~~~~~
 
 
 # Quaternions
-@ref bs::Quaternion "Quaternion"%s are the primary way of representing rotations in Banshee. They can be created using Euler angles, axis/angle combination, or from a rotation matrix (talked about later). 
+@ref bs::Quaternion "Quaternion"%s are the primary way of representing rotations in bs::f. They can be created using Euler angles, axis/angle combination, or from a rotation matrix (talked about later). 
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Quaternion that rotates 30 degrees around X axis, followed by 50 degrees around Z axis (Euler angle representation)
@@ -335,7 +335,7 @@ They also provide a series of *intersects* methods that allow them to test for i
 
 
 # Shapes
 
 
-Banshee supports a variety of other 3D shapes:
+bs::f supports a variety of other 3D shapes:
  - @ref bs::AABox "AABox"
  - @ref bs::Sphere "Sphere"
  - @ref bs::Plane "Plane"

+ 3 - 3
Documentation/Manuals/Native/User/memory.md

@@ -1,12 +1,12 @@
 Memory allocation 						{#memory}
 ===============
-When allocating memory in Banshee it is prefered (but not required) to use Banshee's allocator functions instead of the standard *new* / *delete* operators or *malloc* / *free*.
+When allocating memory in bs::f it is preferred (but not required) to use the bs::f allocator functions instead of the standard *new* / *delete* operators or *malloc* / *free*.
 
 
 Use @ref bs::bs_new "bs_new" instead of *new* and @ref bs::bs_delete "bs_delete" instead of *delete*.
 Use @ref bs::bs_newN "bs_newN" instead of *new[]* and @ref bs::bs_deleteN "bs_deleteN" instead of *delete[]*.
 Use @ref bs::bs_alloc "bs_alloc" instead of *malloc* and @ref bs::bs_free "bs_free" instead of *free*.
 
 
-This ensures the Banshee can keep track of all allocated memory, which ensures better debugging and profiling tools and ensures the internal memory allocation method can be changed in the future.
+This ensures bs::f can keep track of all allocated memory, which enables better debugging and profiling tools and allows the internal memory allocation method to be changed in the future.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Helper structure
@@ -30,7 +30,7 @@ delete ptr;
 delete[] ptrArray;
 free(rawMem);
 
 
-// Allocating memory the Banshee way
+// Allocating memory the bs::f way
 MyStruct* bsPtr = bs_new<MyStruct>(123, false);
 MyStruct* bsPtrArray = bs_newN<MyStruct>(5);
 void* bsRawMem = bs_alloc(12);

+ 92 - 0
Documentation/Manuals/Native/User/nonComponentApproach.md

@@ -0,0 +1,92 @@
+Non-component approach				{#nonComponentApproach}
+===============
+
+bs::f provides an alternate API to its scene-object/component model. It provides a more traditional way of dealing with gameplay logic at the cost of more complexity, but with potentially more freedom. Note that this is a fully optional approach and just another way of doing what we have described so far.
+
+# Scene actors
+Each **Component** class has a corresponding @ref bs::SceneActor "SceneActor", which shares the same name as the component without the "C" prefix (e.g. the @ref bs::Renderable "Renderable" scene actor vs. the **CRenderable** component). 
+
+> Note that throughout these manuals we have often referred to components without using the "C" prefix, but in this manual the un-prefixed names refer to the scene actors themselves.
+
+To create a scene actor, call its static **create()** method. A scene actor is not attached to a **SceneObject**; instead it stands on its own.
+
+~~~~~~~~~~~~~{.cpp}
+// Creating a renderable actor
+SPtr<Renderable> renderable = Renderable::create();
+~~~~~~~~~~~~~
+
+Scene actors and components share most of the same functionality and methods, although scene actors sometimes require some additional setup (for example, a **CAnimation** component is able to find its child **CBone** components automatically, but with scene actors you must connect them manually). Since the majority of the interface is the same we won't talk about individual actor types; you can instead refer to the API reference.
+
+~~~~~~~~~~~~~{.cpp}
+HMesh mesh = ...;
+HMaterial material = ...;
+
+// Set up Renderable actor's mesh and material identically as we would a CRenderable component
+renderable->setMesh(mesh);
+renderable->setMaterial(material);
+~~~~~~~~~~~~~
+
+## Transform
+Each scene actor has a **Transform** object you can use to position and orient it in the scene. The transform can be accessed through @ref bs::SceneActor::getTransform() "SceneActor::getTransform()" and @ref bs::SceneActor::setTransform() "SceneActor::setTransform()". It can be manipulated the same as you would a **Transform** on a **SceneObject**. 
+
+~~~~~~~~~~~~~{.cpp}
+Transform tfrm = renderable->getTransform();
+tfrm.setPosition(Vector3(0.0f, 50.0f, 0.0f));
+renderable->setTransform(tfrm);
+~~~~~~~~~~~~~
+
+# Running custom logic
+When you use scene objects and components to set up your scene, you do so before calling the main loop. During the main loop the system will call various callbacks in which you can implement your game logic. But without components we need a different way of executing gameplay logic.
+
+To do this we need to change how we start the application. You will need to create your own version of the **Application** class by deriving from it. Once derived you can override any of the following methods:
+ - @ref bs::Application::onStartUp "Application::onStartUp()" - Called when the application is first starting up.
+ - @ref bs::Application::preUpdate "Application::preUpdate()" - Called every frame, just before component and plugin updates are triggered.
+ - @ref bs::Application::postUpdate "Application::postUpdate()" - Called every frame, after component and plugin updates are triggered.
+ - @ref bs::Application::onShutDown "Application::onShutDown()" - Called just before the application is about to shut down.
+ 
+~~~~~~~~~~~~~{.cpp}
+class MyApplication : public Application
+{
+public:
+	// Pass along the start-up structure to the parent
+	MyApplication(const START_UP_DESC& desc)
+		:Application(desc)
+	{ }
+
+private:
+	void onStartUp() override
+	{
+		// Ensure all parent systems are initialized first
+		Application::onStartUp();
+
+		// Do your own initialization
+	}
+
+	void onShutDown() override
+	{
+		// Do your own shut-down
+
+		// Shut-down engine components
+		Application::onShutDown();
+	}
+
+	void preUpdate() override
+	{
+		// Execute per-frame logic (optionally do it in postUpdate)
+	}
+};
+~~~~~~~~~~~~~
+
+Once you have your own application override, you can now start the application by calling @ref bs::Application::startUp<T> "Application::startUp<T>()" with the template parameter being your application override class. Everything else regarding start-up remains the same.
+
+~~~~~~~~~~~~~{.cpp}
+Application::startUp<MyApplication>(VideoMode(1280, 720), "My app", false);
+
+// Set up your scene here
+
+Application::instance().runMainLoop();
+Application::shutDown();
+~~~~~~~~~~~~~
+
+Having access to start-up, shut-down and update methods directly allows you to write your game logic as you see fit, without having to follow the **Component** interface.
+ 

+ 4 - 4
Documentation/Manuals/Native/User/offscreenRendering.md

@@ -6,9 +6,9 @@ When we talked about how to set up a **Camera** component we have shown that we
 We call rendering to a texture offscreen rendering. By rendering offscreen you can achieve advanced graphical effects by manipulating the contents of the rendered-to texture before presenting them to the user. 
 
 
 # Creation
-Render texture must contain at least one color surface, and may optionally also contain a depth-stencil surface. Both of those surfaces are **Texture** objects, created with either **TU_RENDERTARGET** or **TU_DEPTHSTENCIL** usage flags, respectively, as demonstrated in the texture manipulation chapter. 
+A render texture must contain at least one color surface, and may optionally also contain a depth-stencil surface. Both of those surfaces are **Texture** objects, created with either **TU_RENDERTARGET** or **TU_DEPTHSTENCIL** usage flags, respectively, which we talked about earlier. 
 
 
-To create a render texture call @ref bs::RenderTexture::create(const RENDER_TEXTURE_DESC&) "RenderTexture::create" with a populated @ref bs::RENDER_TEXTURE_DESC "RENDER_TEXTURE_DESC" structure. This structure expects a reference to one or more color surface textures, and an optional depth-stencil surface texture. For each of those you must also specify the face and mip level onto which to render, in case your texture has multiple.
+To create a render texture call @ref bs::RenderTexture::create(const RENDER_TEXTURE_DESC&) "RenderTexture::create()" with a populated @ref bs::RENDER_TEXTURE_DESC "RENDER_TEXTURE_DESC" structure. This structure expects a reference to one or more color surface textures, and an optional depth-stencil surface texture. For each of those you must also specify the face and mip level onto which to render, in case your texture has multiple.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Create a 1920x1080 texture with 32-bit RGBA format
@@ -52,7 +52,7 @@ All color surfaces and the depth/stencil surface (if present) must have the same
 Render textures can be created with support for multiple samples per pixel. This allows effects such as multi-sampled antialiasing and similar. To create a multi-sampled render texture simply create a **Texture** with its `multisampleCount` parameter larger than one, which you then use to initialize a render texture. Make sure that all surfaces (including depth-stencil) in a render texture have the same number of samples.
 
 
 Multisampled textures cannot be used directly by materials or sampled in shaders. This means that before you can use such a texture for normal rendering you must first resolve its multi-sampled contents into a non-multisampled texture. You may do this in two ways:
- - Call @ref bs::ct::Texture::copy "ct::Texture::copy" with the source texture being your multisampled texture, and the destination being a texture of same dimensions and format, but with a single sample per pixel. Note this is a core-thread only method - we talk more about the core thread later.
+ - Call @ref bs::ct::Texture::copy "ct::Texture::copy()" with the source texture being your multisampled texture, and the destination being a texture of same dimensions and format, but with a single sample per pixel. Note this is a core-thread only method - we talk more about the core thread later.
  - Write a custom shader that manually reads samples from the texture and outputs pixels (out of the scope of this manual)
 
 
 # Rendering to textures
@@ -78,7 +78,7 @@ someMaterial->setTexture("gInputTex", texture);
 Please note that a render texture must not be bound for rendering at the same time you are trying to read from it (either from a shader or from the CPU). This will result in undefined behaviour.
 
 
 # Priority
-All render targets have a priority that can be set by calling @ref bs::RenderTarget::setPriority "RenderTarget::setPriority". This priority can be used as a hint to the renderer in which order should the targets be rendered to. Targets with higher priority will be rendered to before targets with lower priority. This value is only used for render targets assigned to **Camera**%s, and this value is ignored if rendering using the low-level rendering API as in that case you have manual control over rendering order. This is useful if you are rendering to a texture which is used in a later stage as an input, in which case you can ensure the rendering to the texture happens first.
+All render targets have a priority that can be set by calling @ref bs::RenderTarget::setPriority "RenderTarget::setPriority()". This priority serves as a hint to the renderer about the order in which the targets should be rendered to. Targets with higher priority will be rendered to before targets with lower priority. The value is only used for render targets assigned to **Camera**%s, and is ignored when rendering using the low-level rendering API, as in that case you have manual control over rendering order. This is useful if you are rendering to a texture which is used in a later stage as an input, in which case you can ensure the rendering to the texture happens first.
 
 
 ~~~~~~~~~~~~~{.cpp}
 renderTexture->setPriority(50);

+ 1 - 1
Documentation/Manuals/Native/User/physicalMaterial.md

@@ -3,7 +3,7 @@ Physics material						{#physicsMaterial}
 
 
 Physics material is a type of object that can be applied to a **Collider** to control the physical properties of its surface. In particular, it can be used to control the friction coefficients that determine how much damping there is when two objects are touching and moving laterally, as well as a restitution coefficient that determines how elastic collisions between two objects are.
 
 
-It is represented by @ref bs::PhysicsMaterial "PhysicsMaterial" and created by calling @ref bs::PhysicsMaterial::create "PhysicsMaterial::create()". It is a resource, and as such can be saved and loaded as described in the Resource manual.
+It is represented by @ref bs::PhysicsMaterial "PhysicsMaterial" and created by calling @ref bs::PhysicsMaterial::create "PhysicsMaterial::create()". It is a resource, and as such can be saved and loaded like any other resource.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Create physics material with default properties

+ 2 - 2
Documentation/Manuals/Native/User/physicsMesh.md

@@ -1,7 +1,7 @@
 Physics meshes						{#physicsMesh}
 ===============
 
 
-Physics meshes are represented using the @ref bs::PhysicsMesh "PhysicsMesh" class. They are resources, meaning they can be imported, saved and loaded as we described in the Resource manuals.
+Physics meshes are represented using the @ref bs::PhysicsMesh "PhysicsMesh" class. They are resources, meaning they can be imported, saved and loaded as any other resource.
 
 
 ![Physics mesh](MeshCollider.png)  
 
 
@@ -13,7 +13,7 @@ auto importOptions = MeshImportOptions::create();
 importOptions->setCollisionMeshType(CollisionMeshType::Normal);
 ~~~~~~~~~~~~~
 
 
-Then in order to perform actual import we call @ref bs::Importer::importAll "Importer::importAll()", instead of **Importer::import<T>()** we have been calling so far. **Importer::importAll()** should be used when import operation can return more than one resource since **Importer::import<T>()** will only return the default resource (i.e. a **Mesh**).
+Then in order to perform actual import we call @ref bs::Importer::importAll "Importer::importAll()", instead of **Importer::import<T>()** we have been calling so far. **Importer::importAll()** should be used when import operation can return more than one resource (since **Importer::import<T>()** will only return the default resource, i.e. a **Mesh**).
 
 
 **Importer::importAll()** will return an array of resources as @ref bs::SubResource "SubResource" objects. In our case there are two sub-resources: the first one is the normal **Mesh**, and the second one is the physics mesh we requested.
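A rough sketch of what this looks like (assuming **Importer::importAll()** returns a list of **SubResource** entries whose *value* field holds the resource handle - check the API reference for the exact signature; the file name is a placeholder):

~~~~~~~~~~~~~{.cpp}
// Import both the render mesh and the physics mesh from the same file
auto resources = gImporter().importAll("MyModel.fbx", importOptions);

HMesh renderMesh = static_resource_cast<Mesh>(resources[0].value);
HPhysicsMesh physicsMesh = static_resource_cast<PhysicsMesh>(resources[1].value);
~~~~~~~~~~~~~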
 
 

+ 1 - 1
Documentation/Manuals/Native/User/profiling.md

@@ -1,7 +1,7 @@
 Profiling				{#cpuProfiling}
 ===============
 
 
-Code profiling is an important process to determine performance bottlenecks. Profiling measures code execution times and memory allocations. Banshee provides a built-in profiler through the @ref bs::ProfilerCPU "ProfilerCPU" module. This module can be globally accessed through @ref bs::gProfilerCPU() "gProfilerCPU()".
+Code profiling is an important process to determine performance bottlenecks. Profiling measures code execution times and memory allocations. bs::f provides a built-in profiler through the @ref bs::ProfilerCPU "ProfilerCPU" module. This module can be globally accessed through @ref bs::gProfilerCPU() "gProfilerCPU()".
 
 
 The profiler allows you to profile blocks of code and output information about how long the block took to execute, as well as about the number and size of memory allocations.
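A minimal sketch of measuring a block of code (the sample name is arbitrary):

~~~~~~~~~~~~~{.cpp}
gProfilerCPU().beginSample("MyHeavyOperation");

// ... code you want to measure ...

gProfilerCPU().endSample("MyHeavyOperation");
~~~~~~~~~~~~~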
 
 

+ 48 - 0
Documentation/Manuals/Native/User/reflectionProbes.md

@@ -0,0 +1,48 @@
+Reflection environment						{#reflectionProbes}
+===============
+
+Setting up a valid reflection environment is essential for all types of physically based materials. The environment ensures that the specular reflections on the materials correctly reflect the surroundings. 
+
+@ref TODO_IMAGE
+
+A **Skybox** is one such example of a reflection environment. When one is present all materials will reflect the image displayed by the skybox. This is generally fine for open outdoor areas, but when the camera is indoors you don't want the indoor surfaces to reflect the sky. This is where the @ref bs::CReflectionProbe "ReflectionProbe" component comes into play.
+
+# Reflection probes
+While the skybox is used to provide outdoor reflections, reflection probes are used to create reflection cubemaps for indoor environments. Reflection probes have an origin and a radius of influence. Reflection probes also use HDR cubemaps, but instead of using external textures those cubemaps are generated in-engine, at the position of the reflection probe. They are represented using the **ReflectionProbe** component.
+
+~~~~~~~~~~~~~{.cpp}
+HSceneObject reflProbeSO = SceneObject::create("Refl. probe");
+HReflectionProbe reflProbe = reflProbeSO->addComponent<CReflectionProbe>();
+~~~~~~~~~~~~~
+
+You must provide the extents of the geometry covered by the reflection probe. These extents serve both to determine a range of influence, and to approximate the surrounding geometry. For example if you are placing a reflection probe that covers a room, you should strive to match the reflection probe extents with the room walls. In practice you'll want to tweak it to what looks best.
+
+You can assign extents in two ways, depending on reflection probe type:
+ - @ref bs::ReflectionProbeType::Box "ReflectionProbeType::Box" - Reflection probe is represented by a box and extents are set by calling @ref bs::CReflectionProbe::setExtents "CReflectionProbe::setExtents()".
+ - @ref bs::ReflectionProbeType::Sphere "ReflectionProbeType::Sphere" - Reflection probe is represented by a sphere and extents are set by calling @ref bs::CReflectionProbe::setRadius "CReflectionProbe::setRadius()".
+ 
+You can change the type of the reflection probe (and therefore extents) by calling @ref bs::CReflectionProbe::setType "CReflectionProbe::setType()".
+
+~~~~~~~~~~~~~{.cpp}
+reflProbe->setType(ReflectionProbeType::Box);
+reflProbe->setExtents(Vector3(2.0f, 2.0f, 2.0f));
+~~~~~~~~~~~~~
+
+## Generating reflection probes
+The reflection probe cubemap will be generated automatically when the reflection probe is first added to the scene, and whenever it is moved. You can also force the cubemap to regenerate by calling @ref bs::CReflectionProbe::capture() "CReflectionProbe::capture()". This is required when surrounding geometry changes and you wish to update the probe cubemap.
+
+~~~~~~~~~~~~~{.cpp}
+reflProbe->capture();
+~~~~~~~~~~~~~
+
+## Using external textures
+In case you want to use an external HDR texture, similar to a skybox, you can call @ref bs::CReflectionProbe::setCustomTexture "CReflectionProbe::setCustomTexture()". The system will no longer use the automatically generated cubemap and use the provided one instead. If you wish to switch back to the automatic generator, call the method with a null value.
+
+~~~~~~~~~~~~~{.cpp}
+HTexture myCubemap = ...;
+
+reflProbe->setCustomTexture(myCubemap);
+~~~~~~~~~~~~~
+
+## Reflection probe interpolation
+When multiple reflection probes overlap, the system will blend between them based on the distance from the origin and the probe extents. If the system can't blend with other reflection probes it will instead blend with the sky. This means in most cases you want to ensure that reflection probes overlap, in order to provide clean transitions. When the camera is outside the influence of any reflection probes, sky reflections will be used instead.

+ 69 - 9
Documentation/Manuals/Native/User/renderSettings.md

@@ -1,10 +1,25 @@
 Render settings						{#renderSettings}
 ===============
 
 
-@ref bs::RenderSettings "RenderSettings" is an object present on every **Camera** object. It can be retrieved through @ref bs::CCamera::getRenderSettings() "Camera::getRenderSettings()" and allows you to customize what rendering effects are executed when rendering the scene through that view. 
+@ref bs::RenderSettings "RenderSettings" is an object present on every **Camera** object. It can be retrieved through @ref bs::CCamera::getRenderSettings() "CCamera::getRenderSettings()" and allows you to customize what rendering effects are executed when rendering the scene through that view. 
 
 
 For a complete list of tweakable properties check the API reference; here we'll just cover the main points.
 
 
+Note that after you change any of the properties in **RenderSettings** you must call @ref bs::CCamera::setRenderSettings() "CCamera::setRenderSettings()" to apply the changes to the camera.
+
+~~~~~~~~~~~~~{.cpp}
+HCamera camera = ...;
+
+// Tweak the render settings by disabling some effects
+auto rs = camera->getRenderSettings();
+rs->screenSpaceReflections.enabled = false;
+rs->ambientOcclusion.enabled = false;
+rs->enableIndirectLighting = false;
+rs->enableFXAA = false;
+
+camera->setRenderSettings(rs);
+~~~~~~~~~~~~~
+
 # HDR and tonemapping
 # HDR and tonemapping
 HDR stands for high-dynamic range, and it allows the lights in the scene to use a large range of intensity values that can more closely approximate a real-world scene. Lighting information is first written to a floating point texture that can store a wider range of values than a normal RGB texture. These high range lighting values are then used throughout the calculations in the engine, ensuring a higher quality final result. You can toggle HDR rendering through @ref bs::RenderSettings::enableHDR "RenderSettings::enableHDR".
 
 
@@ -17,26 +32,71 @@ HDR and tonemapping is also closely related to exposure, which we'll cover next.
 # Exposure
 Exposure determines which part of the high range image should be converted to low range (e.g. the very bright parts, the very dark parts, or somewhere in the middle). Generally this is a property you will only use when HDR is enabled, as LDR doesn't offer a high enough range for this property to be relevant.
 
 
-By default the system will calculate the exposure automatically, based on how the human eye determines exposure. Generally this means if you are in a very bright area, it will be hard to see into darker areas (imagine standing outside in sunlight and looking into house lit only by artifical light), or when in a very dark area the bright areas will be overexposed. As you move between areas of different light intensity the exposure will slowly adjust accordingly. You can tweak automatic exposure options through @ref bs::RenderSettings::autoExposure "RenderSettings::autoExposure".
+By default the system will calculate the exposure automatically, based on how the human eye determines exposure. Generally this means if you are in a very bright area, it will be hard to see into darker areas (imagine standing outside in sunlight and looking into a house lit only by artificial light), while in a very dark area the bright areas will be overexposed. As you move between areas of different light intensity the exposure will slowly adjust, similar to how your eyes adapt when walking into a house on a bright, sunlit day. You can tweak automatic exposure options through @ref bs::RenderSettings::autoExposure "RenderSettings::autoExposure".
+
+Automatic exposure can be disabled through @ref bs::RenderSettings::enableAutoExposure "RenderSettings::enableAutoExposure". In this case you will want to set the exposure manually through @ref bs::RenderSettings::exposureScale "RenderSettings::exposureScale". This allows for more control over the exposure, which is sometimes required.
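For example (the exposure value is an arbitrary illustration):

~~~~~~~~~~~~~{.cpp}
auto rs = camera->getRenderSettings();

// Turn off automatic exposure and pick a fixed exposure instead
rs->enableAutoExposure = false;
rs->exposureScale = 0.7f;

camera->setRenderSettings(rs);
~~~~~~~~~~~~~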
+
+@ref TODO_IMAGE
+Image with different exposure levels
+
+# White balance
+White balance is a process that occurs during tonemapping and therefore requires tonemapping to be enabled. It is intended to emulate the effect of human vision called 'chromatic adaptation', where our eyes are able to adjust to different lighting conditions while still being able to tell actual colors of a surface (e.g. a blue car illuminated by a strong red light still looks blue). 
+
+When it comes to virtual lighting our eyes cannot perform the same adaptation, as they will adjust to the real-world environment instead of the in-game environment (e.g. to the room your screen is in). The white balance process converts in-game lighting to some real-world lighting, ensuring this adjustment process is emulated as if you were in the in-game lighting environment. 
+
+By default the real-world lighting is assumed to be a room lit by daylight, but the exact environment can be controlled through @ref bs::RenderSettings::whiteBalance "RenderSettings::whiteBalance". Since you cannot assume the lighting environment your application will be viewed under, this might be best left for the user to tweak.
+
+@ref TODO_IMAGE
+Image with different white balance lighting environments
 
 
-Automatic exposure can be disabled through @ref bs::RenderSettings::enableAutoExposure "RenderSettings::enableAutoExposure". In this case you will want to set the exposure manually through @ref bs::RenderSettings::exposureScale "RenderSettings::exposureScale"
+# Color grading
+Color grading gives you additional artistic control over the final image, allowing you to tweak settings such as contrast and saturation. These effects are not physically based and exist purely for artistic purposes.
 
 
-# Screen space reflections
-This effect provides high quality, real-time reflections at a fairly low performance impact. The main limitation effect is that it is performed in screen-space, and therefore cannot reflect an object that's not currently on the screen. When reflection cannot be found the system will fall back onto reflection probes for reflections. The effect is also generally not suitable for perfect mirror-like reflections due to limited precision.
+The relevant options are present in @ref bs::RenderSettings::colorGrading "RenderSettings::colorGrading".
 
 
-You can control, and well as toggle the effect through @ref bs::RenderSettings::screenSpaceReflections "RenderSettings::screenSpaceReflections".
+@ref TODO_IMAGE
+Image with and without custom color grading
 
 
-# Screen space ambient occlusion
-TODO
+# Screen space reflections (SSR)
+This effect provides high quality, real-time reflections at a fairly low performance impact. The main limitation of the effect is that it is performed in screen space, and therefore cannot reflect an object that's not currently on the screen. When a reflection cannot be found the system will fall back onto reflection probes. The effect is also generally not suitable for perfect mirror-like reflections due to limited precision.
 
 
+You can control and toggle the effect through @ref bs::RenderSettings::screenSpaceReflections "RenderSettings::screenSpaceReflections".
 
 
-Also: White balance, color grading, FXAA, depth of field
+@ref TODO_IMAGE
+Left - SSR disabled, Right - SSR enabled
 
 
+# Screen space ambient occlusion (SSAO)
+This effect estimates ambient occlusion using screen-space information. The ambient occlusion is approximated by sampling the nearby geometry and determining the occlusion amount. More nearby geometry results in a higher occlusion value, meaning the surface receives less light. This produces more realistic lighting. Note that SSAO is by default applied only to indirect lighting, and as such it is mostly visible in shadows.
 
 
+You can control and toggle the effect through @ref bs::RenderSettings::ambientOcclusion "RenderSettings::ambientOcclusion".
+
+@ref TODO_IMAGE
+Left - SSAO disabled, Right - SSAO enabled
+
+# Depth of field
+By default the virtual camera focuses perfectly on all parts of the scene it views, but this is not the case with real-world cameras. A real-world camera instead has a focus distance at which the captured image will be perfectly in focus. Anything closer or further away from that distance will get progressively more out of focus (blurry). The depth of field effect emulates this camera behaviour, allowing you to set a focus distance while blurring the near and/or far parts of the scene.
+
+Depth of field options can be tweaked through @ref bs::RenderSettings::depthOfField "RenderSettings::depthOfField".
+
+@ref TODO_IMAGE
+Left - DOF disabled, Right - DOF enabled
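+
+As a rough sketch, the nested settings objects for SSR, SSAO and depth of field can be tweaked in the same manner as the other settings (the *enabled* and *focalDistance* fields below are assumptions - check the **RenderSettings** API reference for the exact member names):
+
+~~~~~~~~~~~~~{.cpp}
+SPtr<RenderSettings> settings = camera->getRenderSettings();
+
+// Field names below are hypothetical - consult the API reference
+settings->screenSpaceReflections.enabled = true;
+settings->ambientOcclusion.enabled = true;
+settings->depthOfField.enabled = true;
+settings->depthOfField.focalDistance = 5.0f; // Focus five units in front of the camera
+
+camera->setRenderSettings(settings);
+~~~~~~~~~~~~~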
+
+# Fast approximate anti-aliasing (FXAA)
+This is a screen space effect that reduces the aliasing artifacts known as 'jaggies'. These artifacts occur when there are discontinuities while rendering a pixel, for example when a pixel contains an edge between two surfaces. This effect is an alternative to other anti-aliasing methods like MSAA. It is significantly faster than MSAA but can result in lower quality and greater overall blurriness of the resulting image.
+
+It can be toggled through @ref bs::RenderSettings::enableFXAA "RenderSettings::enableFXAA".
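+
+For example (again assuming the *camera* handle whose settings we modify and re-apply):
+
+~~~~~~~~~~~~~{.cpp}
+SPtr<RenderSettings> settings = camera->getRenderSettings();
+settings->enableFXAA = true;
+
+camera->setRenderSettings(settings);
+~~~~~~~~~~~~~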
+
+@ref TODO_IMAGE
+Image without FXAA
+
+@ref TODO_IMAGE
+Image with FXAA
 
 
 # Gamma
 Tweaks the gamma value that's applied to the image before being sent to the output device. Mainly affects the brightness of the image.
 
+Controlled through @ref bs::RenderSettings::gamma "RenderSettings::gamma".
+
 # Shadows
 Shadow rendering for a specific view can be completely disabled through @ref bs::RenderSettings::enableShadows "RenderSettings::enableShadows". Shadow options that are view-specific can be controlled through @ref bs::RenderSettings::shadowSettings "RenderSettings::shadowSettings".
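 
 A short sketch combining the last two settings (same *camera* assumption as in the earlier examples):
 
 ~~~~~~~~~~~~~{.cpp}
 SPtr<RenderSettings> settings = camera->getRenderSettings();
 
 // Tweak the output gamma and disable shadow rendering for this view
 settings->gamma = 2.2f;
 settings->enableShadows = false;
 
 camera->setRenderSettings(settings);
 ~~~~~~~~~~~~~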
 
 

+ 3 - 3
Documentation/Manuals/Native/User/rigidbodies.md

@@ -55,7 +55,7 @@ rigidbody->setFlags(RigidbodyFlag::AutoTensors);
 rigidbody->setFlags(RigidbodyFlag::AutoMass);
 ~~~~~~~~~~~~~
 
 
-By properly distributing mass and density over child shapes you can achieve much more realistic simulation for complex objects (e.g. a car). For simple objects (e.g. a barrel, tree trunk) it's best to keep uniform mass density.
+By properly distributing mass and density over child shapes you can achieve much more realistic simulation for complex objects (e.g. a car). For simple objects (e.g. a barrel, a rock) it's best to keep uniform mass density.
 
 
 # Forces
 A rigidbody will not move until we apply some forces to it. Forces can be applied directly (as shown here), and indirectly by being hit by another rigidbody.
@@ -74,7 +74,7 @@ You can also change the strength of gravity by changing its acceleration factor.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Set gravity to the value on the Moon
-gPhysics().setGravity(1.622f); // in m/s^2
+gPhysics().setGravity(Vector3(0, -1.622f, 0)); // in m/s^2
 ~~~~~~~~~~~~~
 
 ## Manual forces
@@ -181,7 +181,7 @@ In most cases you want both of these properties to be calculated automatically.
 rigidbody->setFlags(RigidbodyFlag::AutoTensors);
 ~~~~~~~~~~~~~
 
 
-If you wish to set them manually, you can instead call @ref bs::CRigidbody::setCenterOfMass "CRigidbody::setCenterOfMass()" and @ref bs::CRigidbody::setInertiaTensor "CRigidbody::setInertiaTensor()".
+If you wish to set them manually, you can instead call @ref bs::CRigidbody::setCenterOfMassPosition "CRigidbody::setCenterOfMassPosition()" and @ref bs::CRigidbody::setInertiaTensor "CRigidbody::setInertiaTensor()".
 
 
 # Sleep
 For performance reasons, objects that are not moving or are barely moving will be put to sleep. This allows the physics system to avoid those objects in its calculations. Such objects will be automatically woken up when other objects interact with them, or you move them from code.

+ 90 - 0
Documentation/Manuals/Native/User/savingScene.md

@@ -0,0 +1,90 @@
+Saving a scene						{#savingScene}
+===============
+
+Once you have populated your scene with scene objects and components you will want to save it so you can easily load it later. The entire scene, as well as parts of the scene can be saved by creating a @ref bs::Prefab "Prefab".
+
+# Creating prefabs
+
+A prefab can be created by calling @ref bs::Prefab::create "Prefab::create()" method and providing the relevant **SceneObject**. You can retrieve the root **SceneObject** of the current scene by calling @ref bs::SceneManager::getRootNode() "SceneManager::getRootNode()", accessible through @ref bs::gSceneManager() "gSceneManager()". The second parameter of **Prefab::create()** controls whether the prefab represents an entire scene, or just a subset of scene objects.
+
+~~~~~~~~~~~~~{.cpp}
+// Get scene root
+HSceneObject sceneRoot = gSceneManager().getRootNode();
+
+// Create a prefab of some sub-object
+HSceneObject subObject = sceneRoot->findPath("Path/To/Some/Object");
+HPrefab partialPrefab = Prefab::create(subObject, false);
+
+// Create a prefab of the entire scene
+HPrefab scenePrefab = Prefab::create(sceneRoot, true);
+~~~~~~~~~~~~~
+
+# Saving & loading prefabs
+
+Once a prefab has been created it can be saved and loaded like any other **Resource**.
+
+~~~~~~~~~~~~~{.cpp}
+// Save the prefabs we created previously
+gResources().save(partialPrefab, "partialPrefab.asset");
+gResources().save(scenePrefab, "scenePrefab.asset");
+
+// Then when ready, restore them
+HPrefab loadedPartialPrefab = gResources().load<Prefab>("partialPrefab.asset");
+HPrefab loadedScenePrefab = gResources().load<Prefab>("scenePrefab.asset");
+~~~~~~~~~~~~~
+
+# Instantiating prefabs
+
+After loading, the prefab must be instantiated in order to create the **SceneObject** hierarchy it contains. This is done by calling @ref bs::Prefab::instantiate() "Prefab::instantiate()", which returns a **SceneObject**. By default this scene object will be parented to the current scene root, and it can then be manipulated like any other scene object. You can replace the current scene with a new **SceneObject** root by calling @ref bs::SceneManager::setRootNode() "SceneManager::setRootNode()".
+
+~~~~~~~~~~~~~{.cpp}
+// Instantiate the scene prefab and replace the current scene
+HSceneObject newSceneHierarchy = loadedScenePrefab->instantiate();
+gSceneManager().setRootNode(newSceneHierarchy);
+~~~~~~~~~~~~~
+
+Since prefabs can also be created from arbitrary sub-hierarchies, you can use them to create groups of scene objects and components that are commonly used together, and then re-use them throughout the scene.
+
+~~~~~~~~~~~~~{.cpp}
+// Make a couple of copies of the prefab and place them in different parts of the scene
+HSceneObject subObject1 = loadedPartialPrefab->instantiate();
+HSceneObject subObject2 = loadedPartialPrefab->instantiate();
+
+subObject1->setPosition(Vector3(10.0f, 0.0f, 0.0f));
+subObject2->setPosition(Vector3(50.0f, 0.0f, 0.0f));
+~~~~~~~~~~~~~
+
+# Resource manifest
+
+If your scene contains components that reference resources (e.g. a **Renderable** referencing a mesh or a material) you will also need to save a resource manifest along with your scene. This is an important step as every scene will almost certainly reference some resources. The resource manifest allows the system to automatically find the referenced resources when loading the scene, even after the application has been shut down and started again. Without the manifest your scene will lose all references to its resources when you attempt to load it in a new application session.
+
+A manifest can be retrieved from @ref bs::Resources::getResourceManifest() "Resources::getResourceManifest()". The method expects a manifest name, which will be "Default" for the default manifest. Resources are registered in this manifest whenever you call **Resources::save()**.
+
+> You can also create your own manifests and manage them manually but that is outside the scope of this topic. See the API reference for @ref bs::ResourceManifest "ResourceManifest".
+
+The manifest can then be saved by calling @ref bs::ResourceManifest::save() "ResourceManifest::save()". The method expects a file path to save the manifest to, as well as an optional path to make all the resource paths relative to. You will want to make the paths relative to some folder so that both the manifest and the resources can be relocated while the system is still able to find them.
+
+~~~~~~~~~~~~~{.cpp}
+SPtr<ResourceManifest> manifest = gResources().getResourceManifest("Default");
+
+// Save the manifest as "myManifest.asset", with the assumption that all the resources
+// it references have been saved to the "C:/Data" folder
+ResourceManifest::save(manifest, "C:/myManifest.asset", "C:/Data");
+~~~~~~~~~~~~~
+
+> **SPtr** is a shared pointer, used in bs::f for most object instances that aren't components, scene objects or resources. It is covered later in the [smart pointers manual](@ref smartPointers).
+
+Before loading a **Prefab** you will need to restore the manifest by calling @ref bs::ResourceManifest::load "ResourceManifest::load()". Note that you only need to restore the manifest once when your application starts up (usually before any other resource loads).
+
+The loaded manifest should then be registered with **Resources** by calling @ref bs::Resources::registerResourceManifest "Resources::registerResourceManifest()".
+
+~~~~~~~~~~~~~{.cpp}
+// Load the manifest. Assume that the application has moved to "C:/Program Files (x86)/MyApp".
+SPtr<ResourceManifest> manifest = ResourceManifest::load("C:/Program Files (x86)/MyApp/myManifest.asset", "C:/Program Files (x86)/MyApp/Data");
+
+// Register the manifest
+gResources().registerResourceManifest(manifest);
+~~~~~~~~~~~~~
+
+
+

+ 18 - 18
Documentation/Manuals/Native/User/serializingObjects.md

@@ -1,29 +1,17 @@
-Serializing objects				{#serializingObjects}
+Persisting data				{#serializingObjects}
 ===============
 
 
-Serializing an objects involves encoding the contents of an object, and (usually) saving it to a storage device so it can be decoded later. This process is used by the resources system to save/load all types of resources, and it is also used by the scene system for saving/loading the contents of all components in the scene.
+Often components will have data you will want to persist across application sessions (for example the **Renderable** component needs to remember which **Mesh** and **Material** it references). This persistent data will be automatically saved when a scene is saved, and loaded along with the scene. This process is called data serialization.
 
 
-In order to make an object serializable you need to set up a special interface that allows the system to query information about the object, retrieve and set its data. This interface is known as Run Time Type Information (RTTI).
+In order to make an object serializable you need to set up a special interface that allows the system to query information about the object, retrieve and set its data. This interface is known as Run Time Type Information (RTTI). In this example we talk primarily about components, but the same interface can be used for resources and normal objects.
 
 
-In Banshee any object that is serializable, and therefore has RTTI, must implement the @ref bs::IReflectable "IReflectable" interface. If you are creating custom components or resources, **Component** and **Resource** base classes already derive from this interface so you don't need to specify it manually. The interface is simple, requiring you to implement two methods:
+Any object that is serializable (and therefore provides RTTI information) must implement the @ref bs::IReflectable "IReflectable" interface. If you are creating custom components or resources, **Component** and **Resource** base classes already derive from this interface so you don't need to specify it manually. The interface is simple, requiring you to implement two methods:
  - RTTITypeBase* getRTTI() const;
  - static RTTITypeBase* getRTTIStatic();
 
 Implementations of these methods will return an object containing all RTTI for a specific class. In the rest of this manual we'll focus on explaning how to create a RTTI class implementation returned by these methods.
 
 
 ~~~~~~~~~~~~~{.cpp}
-// IReflectable implementation for a normal class
-class MyClass : public IReflectable
-{
-	// ...class members...
-
-	static RTTITypeBase* getRTTIStatic()
-	{ return MyClassRTTI::instance(); }
-
-	RTTITypeBase* getRTTI() const override
-	{ return MyClass::getRTTIStatic(); }
-};
-
 // IReflectable implementation for a component
 class MyComponent : public Component
 {
@@ -40,6 +28,18 @@ public:
 	RTTITypeBase* getRTTI() const override
 	{ return MyComponent::getRTTIStatic(); }
 };
+
+// IReflectable implementation for a normal class
+class MyClass : public IReflectable
+{
+	// ...class members...
+
+	static RTTITypeBase* getRTTIStatic()
+	{ return MyClassRTTI::instance(); }
+
+	RTTITypeBase* getRTTI() const override
+	{ return MyClass::getRTTIStatic(); }
+};
 ~~~~~~~~~~~~~
 
 # Creating the RTTI object
@@ -114,7 +114,7 @@ public:
 };
 ~~~~~~~~~~~~~
 
 
-> Note that when creating new instances of components within RTTI class, you must use **GameObjectRTTI::createGameObject<T>()** method, instead of just creating the object normally.
+> Note that when creating new instances of components within RTTI class, you must use **GameObjectRTTI::createGameObject<T>()** method, instead of just creating a normal shared pointer.
 
 
 This is the minimal amount of work you need to do in order to implement RTTI. The RTTI types above now describe the class type, but not any of its members. In order to actually have class data serialized, you also need to define member fields.
 
 
@@ -242,7 +242,7 @@ public:
 ~~~~~~~~~~~~~
 
 # Using RTTI
-Once the RTTI has been created, in most cases it will be used automatically. In particular it will be automatically used if implemented for components or resources. If you implement it for normal classes, you might want to know how to use it manually.
+Once the RTTI has been created, in most cases it will be used automatically. In the case of components it will be used when saving/loading a scene, and in the case of resources it will be used when saving/loading a resource. But for any other class you will want to know how to utilize it manually.
 
 
 To manually serialize an object you can use the @ref bs::FileEncoder "FileEncoder" class. Create the file encoder with a path to the output file, followed by a call to @ref bs::FileEncoder::encode "FileEncoder::encode()" with the object to encode as the parameter. The system will encode the provided object, as well as any other referenced **IReflectable** objects.
 
 

+ 32 - 0
Documentation/Manuals/Native/User/skybox.md

@@ -0,0 +1,32 @@
+Skybox						{#skybox}
+===============
+
+Skyboxes use a user-provided cubemap texture to display an image of the sky wherever the camera's view is not occluded by other objects. The same image is also used to provide both specular reflections and indirect lighting on objects lit by the sky, but we will cover these effects later.
+
+@ref TODO_IMAGE
+
+Skybox is represented by the @ref bs::CSkybox "Skybox" component, which requires only a texture of the sky to work. The texture should ideally be in high dynamic range, unless your application is not using HDR. The skybox texture can be set through @ref bs::CSkybox::setTexture "CSkybox::setTexture()".
+
+~~~~~~~~~~~~~{.cpp}
+// Import a sky cubemap from a cylindrical (panoramic) image
+SPtr<ImportOptions> tio = TextureImportOptions::create();
+tio->setIsCubemap(true);
+tio->setCubemapSourceType(CubemapSourceType::Cylindrical);
+tio->setFormat(PF_FLOAT_R11G11B10); // Or the 16-bit floating point format
+
+HTexture skyTexture = gImporter().import<Texture>("MySkybox.hdr", tio);
+
+// Set up the skybox
+HSceneObject skyboxSO = SceneObject::create("Skybox");
+HSkybox skybox = skyboxSO->addComponent<CSkybox>();
+
+skybox->setTexture(skyTexture);
+~~~~~~~~~~~~~
+
+Note that importing a cubemap texture requires the special texture import options @ref bs::TextureImportOptions::setIsCubemap "TextureImportOptions::setIsCubemap()" and @ref bs::TextureImportOptions::setCubemapSourceType "TextureImportOptions::setCubemapSourceType()". The second property expects you to provide a @ref bs::CubemapSourceType "CubemapSourceType" that defines the format the source texture is stored in. The formats are:
+ - **CubemapSourceType::Cylindrical** - The source is a typical panoramic image. This is the most common format.
+ - **CubemapSourceType::Spherical** - The source is an image captured off a surface of a sphere. This is an older format that is less commonly used today.
+ - **CubemapSourceType::Single** - The source is a normal 2D texture. All cubemap faces will use the same image.
+ - **CubemapSourceType::Faces** - The source image contains cubemap faces laid out in the "cross" pattern, either vertically or horizontally.
+
+Aside from setting a texture you might also want to increase or decrease the brightness of the sky by calling @ref bs::CSkybox::setBrightness "CSkybox::setBrightness()". Note that this will not affect the visual appearance of the sky but will only affect the lighting cast by the sky on other surfaces.
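+
+For example, continuing the snippet above (assuming the brightness value acts as a simple multiplier):
+
+~~~~~~~~~~~~~{.cpp}
+// Make the sky contribute twice as much light to the scene
+// (the visible sky image itself is unaffected)
+skybox->setBrightness(2.0f);
+~~~~~~~~~~~~~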

+ 3 - 3
Documentation/Manuals/Native/User/smartPointers.md

@@ -6,7 +6,7 @@ Smart pointers allow the user to allocate objects dynamically (i.e. like using *
 # Unique pointers
 Unique pointers hold ownership of a dynamically allocated object, and automatically free it when they go out of scope. As their name implies they cannot be copied - in other words, only one pointer to that object can exist. They are mostly useful for temporary allocations, or for places where object ownership is clearly defined to a single owner.
 
 
-In Banshee type're represented with @ref bs::UPtr "UPtr", which is just a wrapper for the standard library's *std::unique_ptr*. Use @ref bs::bs_unique_ptr_new "bs_unique_ptr_new<T>" to create a unique pointer pointing to a new instance of T, or @ref bs::bs_unique_ptr "bs_unique_ptr" to create one from an existing instance. 
+In bs::f they are represented with @ref bs::UPtr "UPtr", which is just a wrapper for the standard library's *std::unique_ptr*. Use @ref bs::bs_unique_ptr_new "bs_unique_ptr_new<T>" to create a unique pointer pointing to a new instance of T, or @ref bs::bs_unique_ptr "bs_unique_ptr" to create one from an existing instance.
 
 
 ~~~~~~~~~~~~~{.cpp}
 // Helper structure
@@ -41,9 +41,9 @@ UPtr<MyStruct> ptrOther = std::move(ptr);
 # Shared pointers
 Shared pointers are similar to unique pointers, as they also don't require the object to be explicitly freed after creation. However, unlike unique pointers they can be copied (therefore their name "shared"). This means multiple entities can hold a shared pointer to a single object. Only once ALL such entities lose their shared pointers will the pointed-to object be destroyed.
 
 
-In Banshee type're represented with @ref bs::SPtr "SPtr", which is just a wrapper for the standard library's *std::shared_ptr*. Use @ref bs::bs_shared_ptr_new "bs_shared_ptr_new<T>" to create a shared pointer pointing to a new instance of T, or @ref bs::bs_shared_ptr "bs_shared_ptr" to create one from an existing instance. 
+In bs::f they are represented with @ref bs::SPtr "SPtr", which is just a wrapper for the standard library's *std::shared_ptr*. Use @ref bs::bs_shared_ptr_new "bs_shared_ptr_new<T>" to create a shared pointer pointing to a new instance of T, or @ref bs::bs_shared_ptr "bs_shared_ptr" to create one from an existing instance. 
 
 
-You will find Banshee uses shared pointers commonly all around its codebase.
+You will find bs::f uses shared pointers commonly all around its codebase.
 
 
 ~~~~~~~~~~~~~{.cpp}
 SPtr<MyStruct> ptr = bs_shared_ptr_new<MyStruct>(123, false);

+ 3 - 3
Documentation/Manuals/Native/User/spriteTextures.md

@@ -7,7 +7,7 @@ They are used primarily by 2D elements, like GUI or sprites. Their primary purpo
 
 
 They are represented with the @ref bs::SpriteTexture "SpriteTexture" class and are a **Resource**, same as normal textures.
 
 
-They're created by calling @ref bs::SpriteTexture::create "SpriteTexture::create". As a parameter it expects the source **Texture**, and an optional set of UV coordinates that map to a specific area on the texture. If no coordinates are provided the sprite texture maps to the entirety of the texture, acting the same as a normal texture.
+They're created by calling @ref bs::SpriteTexture::create "SpriteTexture::create()". As a parameter it expects the source **Texture**, and an optional set of UV coordinates that map to a specific area on the texture. If no coordinates are provided the sprite texture maps to the entirety of the texture, acting the same as a normal texture.
 
 
 UV coordinates begin in the top left corner, and are in range [0, 1], where top left is (0, 0), and bottom right (1, 1).
 
 
@@ -23,7 +23,7 @@ Vector2 size(0.5f, 0.5f);
 HSpriteTexture spriteTexPartial = SpriteTexture::create(offset, size, texture)
 ~~~~~~~~~~~~~
 
 
-Once created, you can get the actual width/height of the mapped area by calling @ref bs::SpriteTexture::getWidth "SpriteTexture::getWidth" and @ref bs::SpriteTexture::getHeight "SpriteTexture::getHeight".
+Once created, you can get the actual width/height of the mapped area by calling @ref bs::SpriteTexture::getWidth "SpriteTexture::getWidth()" and @ref bs::SpriteTexture::getHeight "SpriteTexture::getHeight()".
 
 
 ~~~~~~~~~~~~~{.cpp}
 // If our original texture was 1024x1024, this will be 512x512, since it's just a
@@ -32,7 +32,7 @@ UINT32 width = spriteTexPartial->getWidth();
 UINT32 height = spriteTexPartial->getHeight();
 ~~~~~~~~~~~~~
 
 
-You can also always retrieve the underlying texture by calling @ref bs::SpriteTexture::getTexture "SpriteTexture::getTexture".
+You can also always retrieve the underlying texture by calling @ref bs::SpriteTexture::getTexture "SpriteTexture::getTexture()".
 
 
 ~~~~~~~~~~~~~{.cpp}
 HTexture texture = spriteTexPartial->getTexture();

+ 42 - 6
Documentation/Manuals/Native/User/strings.md

@@ -2,11 +2,47 @@ Strings 						{#strings}
 ===============
 Strings are represented with @ref bs::String "String" and @ref bs::WString "WString" types. These are wrappers for the standard C++ strings and have the same interface and behaviour.
 
 
-Use the **String** type for strings containing only ASCII characters (limited character set). Use the **WString** (wide string) for strings containing more complex characters, as it supports all Unicode characters.
+~~~~~~~~~~~~~{.cpp}
+String narrow = "NarrowString";
+WString wide = L"WideString";
+~~~~~~~~~~~~~
+
+# String encoding
+When using a standard string literal ("") note that your string will use a platform-specific encoding. On Windows this will be a single-byte, non-Unicode, locale-specific encoding limited to a 255-character set, while on macOS and Linux this will be a multi-byte UTF-8 encoding. Therefore on Windows you cannot use such literals to encode the full range of Unicode values.
+
+~~~~~~~~~~~~~{.cpp}
+// On Windows only valid for 255 characters of the current locale
+String narrow = "NarrowString";
+
+// On Windows this will not be encoded properly as these characters are unlikely to all be present
+// in the current locale
+String invalidNarrow = "ж¤ÞÐ";
+~~~~~~~~~~~~~
+
+Therefore if you need to support the whole range of Unicode characters make sure to either use **WString** with the "L" prefix, or even better **String** with the "u8" prefix. Otherwise you risk that your characters won't be encoded properly on all platforms.
 
 
 ~~~~~~~~~~~~~{.cpp}
 ~~~~~~~~~~~~~{.cpp}
-String simple = "SimpleString";
-WString complex = "ж¤ÞÐ";
+// Wide strings will always properly encode Unicode, but use unnecessarily large 32-bit UTF32 on Linux/macOS
+WString validWide = L"ж¤ÞÐ";
+
+// Best option is to use narrow strings and force UTF8 encoding
+String validNarrow = u8"ж¤ÞÐ";
+~~~~~~~~~~~~~
+
+# Converting between encodings
+bs::f provides a variety of methods to convert between the most common string encodings. This functionality is provided in the @ref bs::UTF8 "UTF8" class. For example, use @ref bs::UTF8::fromANSI "UTF8::fromANSI()" to convert from a locale-specific encoding to UTF-8, and @ref bs::UTF8::toANSI "UTF8::toANSI()" for the other way around. Conversions for UTF-16 and UTF-32 are also provided.
+
+~~~~~~~~~~~~~{.cpp}
+// Assuming Windows platform
+
+// Locale specific ANSI encoding
+String strANSI = "NarrowString";
+
+// Convert to UTF-8
+String strUTF8 = UTF8::fromANSI(strANSI);
+
+// And back to ANSI
+strANSI = UTF8::toANSI(strUTF8);
 ~~~~~~~~~~~~~
 ~~~~~~~~~~~~~
 
 
 # Converting data types
@@ -46,11 +82,11 @@ string = StringUtil::replaceAll(string, "banana", "643");
 ~~~~~~~~~~~~~
 
 # Formatting strings
-Often you need to construct larger strings from other strings. Use @ref bs::StringUtil::format "StringUtil::format" to construct such strings by providing a template string, which contains special identifiers for inserting other strings. The identifiers are represented like "{0}, {1}" in the source string, where the number represents the position of the parameter that will be used for replacing the identifier.
+Often you need to construct larger strings from other strings. Use @ref bs::StringUtil::format "StringUtil::format()" to construct such strings by providing a template string, which contains special identifiers for inserting other strings. The identifiers are represented like "{0}, {1}" in the source string, where the number represents the position of the parameter that will be used for replacing the identifier.
 
 
 ~~~~~~~~~~~~~{.cpp}
 String templateStr = "Hello, my name is {0}.";
-String str = StringUtil::format(templateStr, "Banshee");
+String str = StringUtil::format(templateStr, "bs::f");
 
 
-// str now contains the string "Hello, my name is Banshee."
+// str now contains the string "Hello, my name is bs::f."
 ~~~~~~~~~~~~~

+ 2 - 2
Documentation/Manuals/Native/User/surfaceShaders.md

@@ -1,9 +1,9 @@
 Surface & lighting shaders					{#surfaceShaders}
 ===============
 
 
-So far we have shown how to create a BSL shader from scratch. But when working with Banshee's default renderer there is a lot going on, and your shaders would need to be quite complex and fully compliant with what the renderer expects. This is not something a normal user wants, or needs to deal with.
+So far we have shown how to create a BSL shader from scratch. But when working with the default bs::f renderer there is a lot going on, and your shaders would need to be quite complex and fully compliant with what the renderer expects. This is not something a normal user wants, or needs, to deal with.
 
 
-For this reason Banshee provides a couple of **mixin**s you can override. By overriding these mixins you can change the renderer's behaviour without having to deal with majority of its complexities.
+For this reason bs::f provides a couple of **mixin**s you can override. By overriding these mixins you can change the renderer's behaviour without having to deal with the majority of its complexities.
 
 
 
 
 
 

+ 1 - 1
Documentation/Manuals/Native/User/time.md

@@ -1,7 +1,7 @@
 Measuring time								{#time}
 ===============
 
 
-Being able to tell the current time, as well as being able to tell elapsed time since the last frame is important for any real-time application. Use the @ref bs::Time "Time" class, accessible through @ref bs::gTime "gTime()" to retrieve global information about the time in Banshee.
+Being able to tell the current time, as well as being able to tell elapsed time since the last frame is important for any real-time application. Use the @ref bs::Time "Time" class, accessible through @ref bs::gTime "gTime()" to retrieve global information about the time in bs::f.
 
 
 # Current time
 
 

+ 3 - 5
Documentation/Manuals/Native/User/windows.md

@@ -1,7 +1,7 @@
 Windows					{#windows}
 ===============
 
 
-A window represents the final destination where the application's rendered output gets displayed to the user. It has a title, size and a position. Window can cover the entirety of the user's screen (fullscreen mode) or just part of it (windowed mode). In Banshee a window is represented using the @ref bs::RenderWindow "RenderWindow" class.
+A window represents the final destination where the application's rendered output gets displayed to the user. It has a title, size and a position. Window can cover the entirety of the user's screen (fullscreen mode) or just part of it (windowed mode). In bs::f a window is represented using the @ref bs::RenderWindow "RenderWindow" class. We have already shown how the application creates a primary window when it is first started up, and in this chapter we'll show how to create more windows manually as well as manipulate them.
 
 
 ![Render window](RenderWindow.png)  
 
 
@@ -53,7 +53,7 @@ newWindow->setWindowed(1280, 720);
 ~~~~~~~~~~~~~
 
 # Window properties
-You can access current properties of the window, like its size and position, by calling @ref bs::RenderWindow::getProperties "RenderWindow::getProperties", which returns a @ref bs::RenderWindowProperties "RenderWindowProperties" object. For example let's print out current window's size:
+You can access current properties of the window, like its size and position, by calling @ref bs::RenderWindow::getProperties "RenderWindow::getProperties()", which returns a @ref bs::RenderWindowProperties "RenderWindowProperties" object. For example let's print out current window's size:
 
 
 ~~~~~~~~~~~~~{.cpp}
 auto& props = newWindow->getProperties();
@@ -73,10 +73,8 @@ void notifyResized()
 newWindow->onResized.connect(&notifyResized);
 ~~~~~~~~~~~~~
 
 
-> **RenderWindow::onResized** is an example of an event. They are explained later in the [event manual](@ref events).
-
 # Video modes
-During window creation and calls to **RenderWindow::setFullscreen** we have seen the use of the @ref bs::VideoMode "VideoMode" class. This class allows you to specify the resolution of the window, along with an optional refresh rate and output monitor (in case of multi-monitor setups, to choose on which monitor to show the window). 
+During window creation and calls to **RenderWindow::setFullscreen()** we have seen the use of the @ref bs::VideoMode "VideoMode" class. This class allows you to specify the resolution of the window, along with an optional refresh rate and output monitor (in case of multi-monitor setups, to choose on which monitor to show the window). 
 
 
 You can create your own **VideoMode** with custom parameters (as we did so far), or you can query for all video modes supported by the user's GPU by calling @ref bs::RenderAPI::getVideoModeInfo() "RenderAPI::getVideoModeInfo()". This will return a @ref bs::VideoModeInfo "VideoModeInfo" object that contains information about all available monitors, their supported resolutions and refresh rates.
 
 

+ 1 - 1
Documentation/Manuals/Native/advMemAlloc.md

@@ -2,7 +2,7 @@ Advanced memory allocation									{#advMemAlloc}
 ===============
 [TOC]
 
 
-Banshee allows you to allocate memory in various ways, so you can have fast memory allocations for many situations. We have already shown how to allocate memory for the general case, using **bs_new** / **bs_delete**, **bs_alloc**, **bs_free** and shown how to use shared pointers. But allocating memory using these general purpose allocators can be expensive. Therefore it is beneficial to have more specialized allocator types that have certain restrictions, but allocate memory with almost no overhead.
+bs::f allows you to allocate memory in various ways, so you can have fast memory allocations for many situations. We have already shown how to allocate memory for the general case, using **bs_new** / **bs_delete**, **bs_alloc**, **bs_free** and shown how to use shared pointers. But allocating memory using these general purpose allocators can be expensive. Therefore it is beneficial to have more specialized allocator types that have certain restrictions, but allocate memory with almost no overhead.
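+
+As a quick reminder, a minimal sketch of those general purpose allocators (*MyClass* here is just a placeholder type):
+
+~~~~~~~~~~~~~{.cpp}
+// Allocate and free a single object
+MyClass* obj = bs_new<MyClass>();
+bs_delete(obj);
+
+// Allocate and free a raw buffer of 128 bytes
+UINT8* buffer = (UINT8*)bs_alloc(128);
+bs_free(buffer);
+~~~~~~~~~~~~~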
 
 
 # Stack allocator {#advMemAlloc_a}
 Stack allocator allows you to allocate memory quickly and with zero fragmentation. It comes with a restriction that it can only deallocate memory in the opposite order it was allocated. This usually only makes it suitable for temporary allocations within a single method, where you can guarantee the proper order.

+ 3 - 3
Documentation/Manuals/Native/apiRefPages.md

@@ -28,10 +28,10 @@ Color color = output->getColorAt(0, 0);
 
 
 ~~~~~~~~~~~~~
 
 
-Optionally, if the method returns an @ref bs::AsyncOp "AsyncOp" object, you can call @ref bs::AsyncOp::blockUntilComplete "AsyncOp::blockUntilComplete" for the same result. However you must ensure that you don't call it before **CoreThread::submit()** has been called, otherwise the calling thread might deadlock as it waits for the operations to finish, even though it was never started.
+Optionally, if the method returns an @ref bs::AsyncOp "AsyncOp" object, you can call @ref bs::AsyncOp::blockUntilComplete "AsyncOp::blockUntilComplete()" for the same result. However you must ensure that you don't call it before **CoreThread::submit()** has been called, otherwise the calling thread might deadlock as it waits for the operations to finish, even though it was never started.
 
 
-If the method provides a return value, you can use the provided **AsyncOp** object to retrieve it using @ref bs::AsyncOp::getReturnValue<T> "AsyncOp::getReturnValue<T>". Calling this is only valid if @ref bs::AsyncOp::hasCompleted "AsyncOp::hasCompleted" returns true. Return value is always available after you blocked using either of the ways mentioned above.
+If the method provides a return value, you can use the provided **AsyncOp** object to retrieve it using @ref bs::AsyncOp::getReturnValue<T> "AsyncOp::getReturnValue<T>()". Calling this is only valid if @ref bs::AsyncOp::hasCompleted "AsyncOp::hasCompleted()" returns true. Return value is always available after you blocked using either of the ways mentioned above.
 
 
-Note that blocking is a very expensive operation and should not be done in performance critical code. If you need to wait until an async method completes, it is preferable to keep querying **AsyncOp::hasCompleted** and do something else until it returns true.
+Note that blocking is a very expensive operation and should not be done in performance critical code. If you need to wait until an async method completes, it is preferable to keep querying **AsyncOp::hasCompleted()** and do something else until it returns true.
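+
+A rough sketch of that polling pattern (*asyncOp* stands in for whatever **AsyncOp** a core thread method returned, and *doSomethingElse()* is a placeholder for your own work):
+
+~~~~~~~~~~~~~{.cpp}
+// Keep doing other work until the async operation finishes
+while(!asyncOp.hasCompleted())
+	doSomethingElse();
+
+// Once complete, the return value can be safely read
+// (the template type must match whatever the operation actually returns)
+UINT32 result = asyncOp.getReturnValue<UINT32>();
+~~~~~~~~~~~~~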
 
 
 If you wish to learn more about how core thread works, visit the [core thread manual](@ref coreThread).

+ 7 - 7
Documentation/Manuals/Native/architecture.md

@@ -2,9 +2,9 @@ Architecture									{#architecture}
 ===============
 [TOC]
 
 
-This manual will explain the architecture of Banshee, to give you a better idea of how everything is structured and where to locate particular systems.
+This manual will explain the architecture of bs::f, to give you a better idea of how everything is structured and where to locate particular systems.
 
 
-Banshee is implemented throughout many separate libraries. Spreading the engine implementation over different libraries ensures multiple things:
+bs::f is implemented throughout many separate libraries. Spreading the engine implementation over different libraries ensures multiple things:
  - Portions of the engine can be easily modified or replaced
  - User can choose which portions of the engine he requires
  - Internals are easier to understand as libraries form a clear architecture between themselves, while ensuring source code isn't all bulked into one big package
@@ -17,9 +17,9 @@ All the libraries can be separated into four main categories:
  - Executable - These are small pieces of code meant to initialize and start up the engine/editor.
  
  
 To give you a better idea here is a diagram showing how all the libraries connect. You can use this for reference when we talk about the individual library purposes later on.
-![Banshee's libraries](ArchitectureSimple.png)  
+![bs::f libraries](ArchitectureSimple.png)  
 
 
-> Note that BansheeEditor layer and scripting libraries are only available when compiling the full Banshee runtime, and are not present in bsFramework.
+> Note that BansheeEditor layer and scripting libraries are only available when compiling the full bs::f runtime, and are not present in bsFramework.
  
  
 # Layers #										{#arch_layers}
 The layers contain the core of the engine. All the essentials and all the abstract interfaces for plugins belong here. The engine core was split into multiple layers for two reasons:
@@ -42,7 +42,7 @@ This layer builds upon the abstraction provided by the core layer and provides a
 And finally the top layer is the editor. It builts upon everything else so far and provides various editor specific features like the project library, build system, editor window management, scene view tools and similar. Large portions of the editor are implemented in the scripting code, and this layer provides more of a set of helper tools used by the scripting system. If you are going to work with this layer you will also be working closely with the scripting interop code and the scripting code (see below).
 
 
 # Plugins #										{#arch_plugins}
-Banshee provides a wide variety of plugins out of the box. The plugins are loaded dynamically and allow you to change engine functionality completely transparently to other systems (e.g. you can choose to load an OpenGL renderer instead of a DirectX one). Some plugins are completely optional and you can choose to ignore them (e.g. importer plugins can usually be ignored for game builds). Most importantly the plugins segregate the code, ensuring the design of the engine is decoupled and clean. Each plugin is based on an abstract interface implemented in one of the layers (for the most part, BansheeCore and %BansheeEngine layers).
+bs::f provides a wide variety of plugins out of the box. The plugins are loaded dynamically and allow you to change engine functionality completely transparently to other systems (e.g. you can choose to load an OpenGL renderer instead of a DirectX one). Some plugins are completely optional and you can choose to ignore them (e.g. importer plugins can usually be ignored for game builds). Most importantly the plugins segregate the code, ensuring the design of the engine is decoupled and clean. Each plugin is based on an abstract interface implemented in one of the layers (for the most part, BansheeCore and %BansheeEngine layers).
 
 
 ## Render API ##								{#arch_rapi}		
 Render API plugins allow you to use a different backend for performing hardware accelerated rendering. @ref bs::RenderAPI "RenderAPI" handles low level rendering, including features like vertex/index buffers, creating rasterizer/depth/blend states, shader programs, render targets, textures, draw calls and similar. 
@@ -59,12 +59,12 @@ All importers implement a relatively simple interface represented by the @ref bs
  - **BansheeFreeImgImporter** - Handles import of most popular image formats, like .png, .psd, .jpg, .bmp and similar. It uses the FreeImage library for reading the image files and converting them into engine's @ref bs::Texture "Texture" format.
  - **BansheeFBXImporter** - Handles import of FBX mesh files. Uses Autodesk FBX SDK for reading the files and converting them into engine's @ref bs::Mesh "Mesh" format.
  - **BansheeFontImporter** - Handles import of TTF and OTF font files. Uses FreeType for reading the font files and converting them into engine's @ref bs::Font "Font" format.
- - **BansheeSL** - Provides an implementation of the Banshee's shader language that allows you to easily define an entire pipeline state in a single file. Imports .bsl files into engine's @ref bs::Shader "Shader" format.
+ - **BansheeSL** - Provides an implementation of the bs::f shader language that allows you to easily define an entire pipeline state in a single file. Imports .bsl files into engine's @ref bs::Shader "Shader" format.
 
 
 ## Others ##									{#arch_others}
 
 
 ### BansheeOISInput ###							{#arch_ois}
-Handles raw mouse/keyboard/gamepad input for multiple platforms. All input plugins implement the @ref bs::RawInputHandler "RawInputHandler" interface. Uses the OIS library specifically modified for Banshee (source code available with Banshee's dependencies). 
+Handles raw mouse/keyboard/gamepad input for multiple platforms. All input plugins implement the @ref bs::RawInputHandler "RawInputHandler" interface. Uses the OIS library specifically modified for bs::f (source code available with bs::f dependencies). 
 
 
 ### BansheePhysX ###				{#arch_physx}
 Handles physics: rigidbodies, colliders, triggers, joints, character controller and similar. Implements the @ref bs::Physics "Physics" interface and any related classes (e.g. @ref bs::Rigidbody "Rigidbody", @ref bs::Collider "Collider"). Uses NVIDIA PhysX as the backend.

+ 2 - 2
Documentation/Manuals/Native/codeStyle.md

@@ -2,7 +2,7 @@ Coding style							{#codeStyle}
 ===============
 [TOC]
 
 
-When making changes or additions that you plan on contributing to Banshee, you must follow the same coding style as the rest of the codebase. This document tries to list the most important aspects of the style.
+When making changes or additions that you plan on contributing to bs::f, you must follow the same coding style as the rest of the codebase. This document tries to list the most important aspects of the style.
 
 
 **Spacing**
  - Use tabs instead of spaces for indentation
@@ -62,7 +62,7 @@ When making changes or additions that you plan on contributing to Banshee, you m
  - Avoid the use of `auto` for variable types, with the exception of very long type names (like iterators)
   - If `auto` is used try to name the variable so its type can be easily deduced
  - Use built-in typedefs for standard library containers (e.g. `Vector`) and shared pointers (`SPtr`).
- - Don't allocate memory using `new/delete` or `malloc/free`, instead use Banshee's allocators
+ - Don't allocate memory using `new/delete` or `malloc/free`, instead use bs::f allocators
  - No code warnings under default compiler warning settings are allowed. Fix all your warnings or if absolutely not possible isolate that bit of code and disable that specific warning (but only in that bit of code).
  - Never use `using namespace` in a header
  
  

+ 1 - 1
Documentation/Manuals/Native/commandBuffers.md

@@ -1,7 +1,7 @@
 Command buffers		{#commandBuffers}
 ===============
 
 
-Rendering can be a very CPU heavy operation even though GPU does all the rendering - but CPU is still the one submitting all those commands. For this purpose Banshee provides a @ref bs::ct::CommandBuffer "ct::CommandBuffer" object. This object allows you to queue low-level rendering commands on different threads, allowing you to better distribute the CPU usage. Normally rendering commands are only allowed to be submitted from the core thread, but when using command buffers you are allowed to use a different thread for each command buffer.
+Rendering can be a very CPU heavy operation even though GPU does all the rendering - but CPU is still the one submitting all those commands. For this purpose bs::f provides a @ref bs::ct::CommandBuffer "ct::CommandBuffer" object. This object allows you to queue low-level rendering commands on different threads, allowing you to better distribute the CPU usage. Normally rendering commands are only allowed to be submitted from the core thread, but when using command buffers you are allowed to use a different thread for each command buffer.
 
 
 Almost every method on **RenderAPI** accepts a **CommandBuffer** as its last parameter. If you don't provide one the system will instead use its primary internal command buffer. When you do you can use **RenderAPI** from different threads safely.
 
 

+ 1 - 1
Documentation/Manuals/Native/coreThread.md

@@ -2,7 +2,7 @@ Core Thread								{#coreThread}
 ===============
 [TOC]
 
 
-Banshee is a multi-threaded engine that has two primary threads. One is the main thread on which the application is started, this is where your game code runs and what majority of users will be working with, we call this the **simulation** thread. The second thread is the rendering thread, this is where all calls to render API (like Vulkan/DirectX/OpenGL) are made. This thread also deals with the OS (like the main message loop). We call this the **core** thread.
+bs::f is a multi-threaded engine that has two primary threads. One is the main thread on which the application is started; this is where your game code runs and what the majority of users will be working with. We call this the **simulation** thread. The second thread is the rendering thread, where all calls to the render API (like Vulkan/DirectX/OpenGL) are made. This thread also deals with the OS (such as the main message loop). We call this the **core** thread.
 
 
 Various other operations can use threads other than the two primary ones (async resource loading, physics, animation, etc.) in the form of worker threads or tasks. But we won't touch on those as they act as standard threads and the system using them has full control.
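
As a small illustration of that split, simulation-thread code typically hands work to the core thread by queuing a command (a sketch, assuming the `gCoreThread()` accessor and its `queueCommand()` method; the `gDebug().logDebug()` call is just a placeholder body):

~~~~~~~~~~~~~{.cpp}
// Queued from the simulation thread; executed later on the core thread
gCoreThread().queueCommand([]()
{
	// Core-thread (ct namespace) objects may be safely used in here
	gDebug().logDebug("Hello from the core thread");
});
~~~~~~~~~~~~~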
 
 

+ 2 - 2
Documentation/Manuals/Native/customGui.md

@@ -2,7 +2,7 @@ Extending the GUI system						{#customGUI}
 ===============
 [TOC]
 
 
-Even though Banshee provides fully skinnable and very customizable GUI elements, sometimes the built-in ones are just not enough if you need some very specialized functionality or look. Banshee allows you to create brand new elements and fully customize the way how they are rendered and how the user interacts with such elements.
+Even though bs::f provides fully skinnable and very customizable GUI elements, sometimes the built-in ones are just not enough if you need some very specialized functionality or look. bs::f allows you to create brand new elements and fully customize how they are rendered and how the user interacts with them.
 
 
 You are expected to have read the user-facing GUI manuals before proceeding, and as such familiarized yourself with the basics.
 
 
@@ -149,7 +149,7 @@ class GUITexture : public GUIElement
 };
 ~~~~~~~~~~~~~
 
 
-Banshee also provides a set of helper classes for generating required geometry in the form of @ref bs::ImageSprite "ImageSprite" and @ref bs::TextSprite "TextSprite" classes. **ImageSprite** can easily generate image geometry of specified size, whether a simple quad or a scale-9-grid image (scalable image with fixed borders). And **TextSprite** will take a text string, font and additional options as input, and output a set of quads required for text rendering.
+bs::f also provides a set of helper classes for generating required geometry in the form of @ref bs::ImageSprite "ImageSprite" and @ref bs::TextSprite "TextSprite" classes. **ImageSprite** can easily generate image geometry of specified size, whether a simple quad or a scale-9-grid image (scalable image with fixed borders). And **TextSprite** will take a text string, font and additional options as input, and output a set of quads required for text rendering.
 
 
 @ref bs::GUIElement::_getMaterial() "GUIElement::_getMaterial()"
 
 

+ 1 - 1
Documentation/Manuals/Native/customImporters.md

@@ -2,7 +2,7 @@ Creating custom importers						{#customImporters}
 ===============
 [TOC]
 
 
-Importers process raw resources in a third-party format (like FBX mesh or a PNG image) into an engine-ready format (e.g. a **Mesh** or a **Texture**). Banshee has an extensible importer system so you may easily add your own importers, either for existing resource types or for new ones. This way you can add support for new third party file formats.
+Importers process raw resources in a third-party format (like FBX mesh or a PNG image) into an engine-ready format (e.g. a **Mesh** or a **Texture**). bs::f has an extensible importer system so you may easily add your own importers, either for existing resource types or for new ones. This way you can add support for new third party file formats.
 
 
 To implement your own importer you need to implement the @ref bs::SpecificImporter "SpecificImporter" interface.
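
A bare-bones skeleton gives an idea of the shape of such an importer (a sketch for a hypothetical `.raw` texture format; the exact set of methods to override is covered by the **SpecificImporter** reference):

~~~~~~~~~~~~~{.cpp}
// Minimal importer for a hypothetical ".raw" texture format
class RawTextureImporter : public SpecificImporter
{
public:
	bool isExtensionSupported(const WString& ext) const override
	{
		return ext == L"raw";
	}

	bool isMagicNumberSupported(const UINT8* magicNumPtr, UINT32 numBytes) const override
	{
		return true; // The format has no magic number, so rely on the extension only
	}

	SPtr<Resource> import(const Path& filePath, SPtr<const ImportOptions> importOptions) override
	{
		// ... read the file and build a Texture here ...
		return nullptr;
	}
};
~~~~~~~~~~~~~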
 
 

+ 1 - 1
Documentation/Manuals/Native/customRenderer.md

@@ -2,7 +2,7 @@ Creating a renderer plugin							{#customRenderer}
 ===============
 [TOC]
 
 
-If your project requires a very specific form of rendering you might decide you want to write your own renderer from scratch. In Banshee renderers are built as plugins, and this manual will show you how to create one. This manual can also be useful if trying to understand how the renderer works, even if you are not implementing your own.
+If your project requires a very specific form of rendering you might decide you want to write your own renderer from scratch. In bs::f renderers are built as plugins, and this manual will show you how to create one. This manual can also be useful if trying to understand how the renderer works, even if you are not implementing your own.
 
 
 # Components and the renderer {#renderer_a}
 We've already shown how to render scene objects. You create a **SceneObject** on which you then attach components such as **CCamera**, **CRenderable** or **CLight**. These components will then register themselves with the renderer, which takes care of everything else rendering-wise.

+ 0 - 1
Documentation/Manuals/Native/devManuals.md

@@ -38,6 +38,5 @@ Developer manuals									{#devManuals}
   - [Interacting with the script runtime](@ref mono)
   - [Script objects](@ref scriptObjects)
 - [Extending the GUI system](@ref customGUI)
-- [Porting to other platforms](@ref porting)
 - [Code style](@ref codeStyle)
 - [Quick reference](@ref quickref)

+ 4 - 4
Documentation/Manuals/Native/gettingStarted.md

@@ -2,10 +2,10 @@ Getting started								{#gettingStarted}
 ===============
 [TOC]
 
 
-This manual offers a quick overview of commonly used Banshee functionality, in order to give you a better idea of how Banshee works. For a fully working example check out the `ExampleGettingStarted` project available with the source code.
+This manual offers a quick overview of commonly used bs::f functionality, in order to give you a better idea of how bs::f works. For a fully working example check out the `ExampleGettingStarted` project available with the source code.
 
 
 # Starting an application
-Banshee is started through the @ref bs::Application "Application" interface. To start the engine you need to provide it with a description of the primary render window.
+bs::f is started through the @ref bs::Application "Application" interface. To start the engine you need to provide it with a description of the primary render window.
 
 
 The application is started by calling @ref bs::Application::startUp "Application::startUp()", after which you can set up your custom code in the form of components (see later). Finally you can run the main loop with @ref bs::Application::runMainLoop "Application::runMainLoop()" which will execute your code and actually get everything in motion.
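
A minimal start-up sequence looks roughly like this (a sketch, assuming a 1280x720 windowed primary window):

~~~~~~~~~~~~~{.cpp}
// Start the engine with a description of the primary render window
Application::startUp(VideoMode(1280, 720), "My app", false);

// ... create scene objects and attach components here ...

// Run until the user quits, then clean up
Application::instance().runMainLoop();
Application::shutDown();
~~~~~~~~~~~~~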
 
 
@@ -36,7 +36,7 @@ HTexture dragonTexture = gImporter().import<Texture>("Dragon.psd");
 # Setting up a material
 Once we have a mesh and a texture we need some way to apply that texture to the mesh. For that reason we first import a @ref bs::Shader "Shader" that describes how an object is rendered, which we then use to create a @ref bs::Material "Material" that allows us to apply our previously loaded **Texture**.
 
 
-Banshee uses .bsl files to describe shaders and you can learn more about BSL syntax in the @ref shaders manual. To learn more about materials and how to use them read the @ref simpleMaterial manual.
+bs::f uses .bsl files to describe shaders and you can learn more about BSL syntax in the @ref shaders manual. To learn more about materials and how to use them read the @ref simpleMaterial manual.
 
 
 ~~~~~~~~~~~~~{.cpp}
 HShader diffuse = gImporter().import<Shader>("Diffuse.bsl");
@@ -96,4 +96,4 @@ guiLayout->addNewElement<GUIButton>(HString(L"Click me too!"));
 # Final result
 @ref TODO_IMAGE
 
 
-There is a lot more to Banshee, but hopefully this gave you a quick taste of how it works. Continue reading other manuals and the API reference for more information.
+There is a lot more to bs::f, but hopefully this gave you a quick taste of how it works. Continue reading other manuals and the API reference for more information.

+ 1 - 1
Documentation/Manuals/Native/gpuBuffers.md

@@ -2,7 +2,7 @@ GPU Buffers			{#gpuBuffers}
 ===============
 [TOC]
 
 
-GPU buffers (also known as generic buffers) allow you to provide data to a **GpuProgram** similar as a texture. In particular they are very similar to a one-dimensional texture. They aren't constrained by size limitations like a texture, and allow each entry in the buffer to be more complex than just a primitive data type. This allows you to provide your GPU programs with complex data easily. In Banshee they are represented using the @ref bs::ct::GpuBuffer "ct::GpuBuffer" type. 
+GPU buffers (also known as generic buffers) allow you to provide data to a **GpuProgram** in a similar fashion to a texture. In particular they are very similar to a one-dimensional texture. They aren't constrained by size limitations like a texture, and allow each entry in the buffer to be more complex than just a primitive data type. This allows you to provide your GPU programs with complex data easily. In bs::f they are represented using the @ref bs::ct::GpuBuffer "ct::GpuBuffer" type.
 
 
 # Creation {#gpuBuffers_a}
 To create a **ct::GpuBuffer** you must fill out a @ref bs::GPU_BUFFER_DESC "GPU_BUFFER_DESC" structure and call the @ref bs::ct::GpuBuffer::create "ct::GpuBuffer::create()" method. At minimum you need to provide:
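
For orientation, a populated descriptor might look roughly like this (a sketch; the field names below are assumptions based on **GPU_BUFFER_DESC** and the exact list is in the reference):

~~~~~~~~~~~~~{.cpp}
GPU_BUFFER_DESC desc;
desc.elementCount = 1024;        // Number of elements in the buffer
desc.type = GBT_STANDARD;        // A standard (non-structured) buffer
desc.format = BF_32X4F;          // Each element is four 32-bit floats

SPtr<GpuBuffer> buffer = GpuBuffer::create(desc);
~~~~~~~~~~~~~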

+ 3 - 3
Documentation/Manuals/Native/gpuPrograms.md

@@ -2,11 +2,11 @@ GPU programs									{#gpuPrograms}
 ===============
 [TOC]
 
 
-GPU programs are programmable parts of the GPU pipeline, in other literature often called shaders (Banshee uses the word shader for a higher level concept, so we won't call them that). They are core objects, meaning they can be used on both sim and core threads. We'll focus on the core thread version and note the differences between the two where relevant.
+GPU programs are programmable parts of the GPU pipeline, in other literature often called shaders (bs::f uses the word shader for a higher level concept, so we won't call them that). They are core objects, meaning they can be used on both sim and core threads. We'll focus on the core thread version and note the differences between the two where relevant.
 
 
-In Banshee they are represented with the @ref bs::ct::GpuProgram "ct::GpuProgram" class. There are six types of GPU programs: vertex, hull (tesselation control), domain (tesselation evaluation), geometry, fragment (pixel) and compute programs. Each is used for a different purpose but has the same interface. We assume the user is familiar with the GPU pipeline and what the different program types do. 
+In bs::f they are represented with the @ref bs::ct::GpuProgram "ct::GpuProgram" class. There are six types of GPU programs: vertex, hull (tessellation control), domain (tessellation evaluation), geometry, fragment (pixel) and compute programs. Each is used for a different purpose but has the same interface. We assume the user is familiar with the GPU pipeline and what the different program types do.
 
 
-> Note that if you are using Banshee Shading Language you do not need to create GPU programs manually - any shaders you import and materials created from those shaders will have GPU programs created internally, but they will be hidden from the normal user.
+> Note that if you are using bs::f Shading Language you do not need to create GPU programs manually - any shaders you import and materials created from those shaders will have GPU programs created internally, but they will be hidden from the normal user.
 
 
 # Creating GPU programs {#gpuPrograms_a}
 To create a GPU program call @ref bs::ct::GpuProgram::create() "ct::GpuProgram::create()" with a @ref bs::GPU_PROGRAM_DESC "GPU_PROGRAM_DESC" structure. The structure needs to have the following fields populated:
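
For orientation, a populated descriptor might look roughly like this (a sketch; `source` would hold actual HLSL/GLSL code, and the field names are based on **GPU_PROGRAM_DESC** as I understand it):

~~~~~~~~~~~~~{.cpp}
GPU_PROGRAM_DESC desc;
desc.type = GPT_FRAGMENT_PROGRAM;   // Pipeline stage this program runs on
desc.entryPoint = "main";           // Entry function within the source
desc.language = "hlsl";             // Language the source is written in
desc.source = "...";                // The program source code itself

SPtr<GpuProgram> program = GpuProgram::create(desc);
~~~~~~~~~~~~~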

+ 11 - 10
Documentation/Manuals/Native/manuals.md

@@ -14,15 +14,16 @@ User manuals									{#manuals}
  - [Meshes](@ref importingMeshes)
  - [Materials](@ref simpleMaterial)
  - [Textures](@ref importingTextures)
- - [Lighting](@ref lights) 
+ - [Lighting](@ref lights)
+ - [Saving a scene](@ref savingScene)
 - **Input**
  - [Input polling](@ref inputPolling)
  - [Input events](@ref inputEvents)
  - [Virtual input](@ref virtualInput)
 - **Gameplay logic**
- - [Creating custom components](@ref customComponents)
- - [Serializing objects](@ref serializingObjects)
- - Non-component approach
+ - [Creating components](@ref customComponents)
+ - [Persisting data](@ref serializingObjects)
+ - [Non-component approach](@ref nonComponentApproach)
 - **GUI**
  - [Sprite textures](@ref spriteTextures)
  - [Basic setup](@ref guiSetup)
@@ -49,9 +50,12 @@ User manuals									{#manuals}
  - [Bones](@ref bones)
  - [Advanced animation](@ref advancedAnimation)
 - **Advanced rendering**
- - [Image based lighting](@ref imageBasedLighting)
- - Indirect lighting
  - [Renderer settings](@ref renderSettings)
+ - [Skybox](@ref skybox)
+ - [Reflection environment](@ref reflectionProbes)
+ - [Indirect lighting](@ref indirectLighting)
+ - [Advanced textures](@ref advancedTextures)
+ - [Advanced meshes](@ref creatingMeshes)
  - [Offscreen rendering](@ref offscreenRendering)
  - [Windows](@ref windows)
  - **Custom materials**
@@ -70,9 +74,6 @@ User manuals									{#manuals}
  - [Measuring time](@ref time)
  - [Cursors](@ref cursors)
  - [Profiling](@ref cpuProfiling)
-- **Advanced**
- - [Manipulating textures](@ref advancedTextures)
- - [Creating meshes](@ref creatingMeshes)
- - [Advanced startup](@ref advancedStartup)
+- **Other**
  - [Advanced RTTI](@ref advancedRtti)
  - [Prefabs](@ref prefabs)

+ 1 - 1
Documentation/Manuals/Native/nonProgrammableStates.md

@@ -19,7 +19,7 @@ There are three non-programmable state objects in total:
  - @ref bs::ct::DepthStencilState "ct::DepthStencilState"
  - @ref bs::ct::BlendState "ct::BlendState"
  
  
-> If using Banshee Shading Language you can specify these states directly in a BSL file and should have no need to create them manually. 
+> If using bs::f Shading Language you can specify these states directly in a BSL file and should have no need to create them manually. 
  
  
 # Rasterizer state {#nonProg_a}
 Rasterizer state allows you to control how 3D polygons, lines or points are converted to 2D pixels. You can create it by filling out the @ref bs::RASTERIZER_STATE_DESC "RASTERIZER_STATE_DESC" structure and passing it to @ref bs::ct::RasterizerState::create "ct::RasterizerState::create()".
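
For example, a state that disables back-face culling and renders in wireframe might be created like so (a sketch; the `cullMode`/`polygonMode` field names are assumptions based on the descriptor reference):

~~~~~~~~~~~~~{.cpp}
RASTERIZER_STATE_DESC desc;
desc.cullMode = CULL_NONE;          // Draw both front and back facing polygons
desc.polygonMode = PM_WIREFRAME;    // Rasterize as wireframe instead of solid

SPtr<RasterizerState> state = RasterizerState::create(desc);
~~~~~~~~~~~~~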

+ 2 - 2
Documentation/Manuals/Native/plugins.md

@@ -2,9 +2,9 @@ Plugins						{#plugins}
 ===============
 [TOC]
 
 
-Many systems in Banshee are implemented through plugins, libraries that are separate from the core of the engine and can be dynamically loaded or unloaded. If possible, it is the prefered way of extending the engine.
+Many systems in bs::f are implemented through plugins, libraries that are separate from the core of the engine and can be dynamically loaded or unloaded. If possible, this is the preferred way of extending the engine.
 
 
-Banshee supports plugins for the following systems:
+bs::f supports plugins for the following systems:
  - Audio - Systems for providing audio playback.
  - Importers - Importers that handle conversion of some third party resource format into an engine-ready format.
  - Input - Reports input events (mouse, keyboard, gamepad, etc.)

+ 0 - 72
Documentation/Manuals/Native/porting.md

@@ -1,72 +0,0 @@
-Porting									{#porting}
-===============
-[TOC]
-
-This guide will try to offer you a solid set of guidelines how to go about porting Banshee to a different operating system.
-
-# Compilation {#porting_a}
-Banshee currently compiles using MSVC and Clang on Windows. There should be little issues compiling using Clang on other platforms. GCC was not tested but considering compilation works with two separate compilers there should be no major issues. Banshee also supports the CMake build system, which can generate make files and project files for various popular compilers and IDEs. 
-
-This means that as far as the compilation goes most of the work should be done for you.
-
-# Platform specific functionality {#porting_b}
-Most of the porting work remains in adding platform specific functionality like file-system, windows and similar. Banshee comes with a fully working implementation of OpenGL & Vulkan, which means the rendering API is already cross-platform (for the most part), and the platform-specific functionality mostly lies in various utility functionality.
-
-Banshee was built with multi-platform in mind from day one, and it tries to minimize the amount of platform specific functionality as much as possible. The functionality that is platform specific is encapsulated so external code never needs to access it directly, making the porting process transparent to higher level systems. All of the platform specific functionality is cleanly marked either in an \#ifdef block or is in a separate source file with a special prefix.
-
-Banshee is built in layers, higher layers referencing lower layers. This should make porting easier as you can start with the lowest layer and work your way up. This way you can compile and test layer by layer instead of needing to fully port the entire engine to properly compile it. Additionally a lot of functionality is in plugins and those generally don't have any platform specific code (except the OpenGL & Vulkan plugins), which should also help with dividing the work into manageable chunks. These aspects of Banshee should significantly help with the porting effort, so keep the layers/plugins in mind.
-
-Aside from dividing the work by layers/plugins you should most definitely also divide it by functionality needed: editor requires significantly more platform specific code than the core framework. You should first strive to port all the features used by the core framework, then after everything is working should you proceed with working on editor features.
-
-All the features that need porting are wrapped in a BS_PLATFORM \#ifdef block, or in files prefixed with "Win32". In a very limited set of cases there are also BS_COMPILER defines for functionality specific to a compiler. For every such block and file you will need to write equivalent code for the destination platform. 
-
-Below you will find a fairly complete list of all such blocks/files that need to be modified, to give you a good idea of the scope. Each listed feature has an indication whether this is a framework or editor-only feature.
-
-Additionally, not all features are critical, meaning you can get the engine to run without them (e.g. platform cursors or clipboard), which are also specially marked. For them it is suggested you implement a dummy version first, and only proceed with actual implementation once the critical features are done.
-
-## Critical features {#porting_b_a}
-A list of all critical features that require the engine to be ran standalone (no editor), in the rough order they should be implemented.
-
-Feature                                         | Editor only 	| Library                       | Dummy implementation  | Relevant files										 | Description
-------------------------------------------------|---------------|-------------------------------|-----------------------|--------------------------------------------------------|-----------------
-File system*								   	| No			| BansheeUtility				| No					| BsFileSystem.h/BsWin32FileSystem.cpp 					 | Opening/creating files, iterating over directories
-Dynamic library loading							| No			| BansheeUtility				| No					| BsDynLib.h/BsDynLib.cpp 							     | Loading dynamic libraries (.dll, .so)
-OpenGL initialization*							| No			| BansheeGLRenderAPI			| No					| BsGLUtil.h, BsGLSupport.h/BsWin32GLSupport.cpp, BsWin32Context.h/BsWin32Context.cpp, BsWin32VideoModeInfo.cpp | Initializing the OpenGL context 
-Vulkan initialization*							| No			| BansheeVulkanRenderAPI		| No					| BsVulkanRenderAPI.cpp, BsWin32VideoModeInfo.cpp | Initializing the Vulkan context
-Window creation*								| No			| BansheeUtility, BansheeGLRenderAPI, BansheeVulkanRenderAPI | No				| BsWin32Window.h/BsWin32Window.cpp, BsWin32Platform.h/BsWin32Platform.cpp, BsWin32RenderWindow.h/BsWin32RenderWindow.cpp | Creating and interacting with the window
-OS message loop*								| No			| BansheeCore					| No					| BsWin32Platform.h/BsWin32Platform.cpp 				 | Running the main message loop, responding to its events
-Input*											| No			| BansheeCore					| Maybe					| BsPlatform.h/BsWin32Platform.cpp 						 | Receive input from OS (mouse, keyboard)
-UUID generation									| No			| BansheeUtility				| No					| BsPlatformUtility.h/BsWin32PlatformUtility.cpp 		 | Generate UUID/GUID
-
-## Non-critical features {#porting_b_b} 
-A list of non-critical features, and editor-only features, in the rough order they should be implemented. You should be able to get the engine running without these, or with just dummy implementations (that do nothing).
-
-Feature                                         | Editor only 	| Library                       | Dummy implementation  | Relevant files											| Description
-------------------------------------------------|---------------|-------------------------------|-----------------------|-----------------------------------------------------------|-------------------
-Crash handler									| No			| BansheeUtility				| Yes					| BsCrashHandler.h, ThreadPool.cpp, BansheeEditorExec.cpp, Main.cpp (in Game project) | Save a log with a callstack when a crash occurs
-Process termination								| No			| BansheeUtility				| Yes					| BsPlatformUtility.h/BsWin32PlatformUtility.cpp | Terminate the application on user request
-Cursor											| No			| BansheeCore					| Yes					| BsPlatform.h/BsWin32Platform.cpp | Get/set cursor position, clip cursor, change cursor look
-Window non-client areas 						| Yes			| BansheeCore					| Yes					| BsPlatform.h/BsWin32Platform.cpp | Set up OS window borders used for resize/move operations
-Changing executable icon*						| Yes			| BansheeCore					| Yes					| BsPlatform.h/BsWin32Platform.cpp | Ability to inject an icon into an executable, used by the build process
-Clipboard										| No			| BansheeUtility				| Yes					| BsPlatformUtility.h/BsWin32PlatformUtility.cpp | Ability to copy/paste text from the editor and the OS
-Converting keyboard code to character			| Yes			| BansheeUtility				| Yes					| BsPlatformUtility.h/BsWin32PlatformUtility.cpp | Converting keyboard codes into a character symbol
-Retrieving MAC address							| Yes			| BansheeUtility				| Yes					| BsPlatformUtility.h/BsWin32PlatformUtility.cpp | Retrieving a MAC address of the computer
-Browse file/folder dialogs						| Yes			| BansheeUtility				| Yes					| BsPlatformUtility.h/BsWin32BrowseDialogs.cpp, BsPlatformUtility.h/BsWin32PlatformUtility.cpp | OS built-in dialogs for browsing/creating files/folders
-Folder monitor*									| Yes			| BansheeCore					| Yes					| BsFolderMonitor.h, BsWin32FolderMonitor.h/BsWin32FolderMonitor.cpp | Monitor that can track and report  file changes/additions/deletions in a folder
-Drop target*									| Yes			| BansheeCore					| Yes					| BsWin32DropTarget.h/BsWin32Platform.cpp | Target that can be used for drag and drop operations initiated by the OS
-Script compilation								| Yes			| BansheeMono					| Yes					| BsMonoManager.cpp | Starting of the external compiler tool, and copying its output files into proper location.
-Game build										| Yes			| Game, BansheeEditor, MBansheeEditor | Yes				| Main.cpp in Game, BuildManager.cpp in BansheeEditor, BuildManager.cs in MBansheeEditor | Copying the right libraries, and assemblies during build. Platform-specific options in the build manager.
-Splash Screen									| Yes			| BansheeEngine					| Yes					| BsSplashScreen.cpp | Displaying a splash screen with the Banshee logo
-MonoDevelop integration*						| Yes			|BansheeEditor					| Yes					| BsCodeEditor.cpp | Ability to open/edit script files with MonoDevelop, similar to how VS integration works
-
-(*) - This is a larger and/or non-trivial task. Most listed tasks are just a few dozen up to a couple of hundred lines of code, or if larger they're trivially simple. Larger tasks are a few hundred lines of code (less than a 1000) and/or might be more difficult to implement than others. This is noted here to give you a better idea of the scope.
-
-# Compiling third party dependencies {#porting_c} 
-In order to run Banshee on different platforms you will also need to compile all of Banshee's dependencies. Most of Banshee's dependencies are only used in plugins, which should make it easier to compile and test them individually.
-
-All used dependencies are already multi-platform and you should have little trouble compiling them for major platforms. See [this link](http://bit.ly/2oLL0uR) for information which dependencies are needed.
-
-# Mobile platforms {#porting_d} 
-If porting to mobile platforms you will also need to provide a compatible render API plugin. Vulkan can be used for some mobiles but isn't supported for all, so you might need to write your own render API plugin. It is suggested you use the BansheeOpenGL plugin as an example of creating it (OpenGL ES is commonly supported on most mobiles, or Metal supported only on Apple platforms).
-
-When porting to mobile you do not need to port any editor specific functionality, and can concern yourself only with porting the core framework. 

+ 1 - 1
Documentation/Manuals/Native/quickref.md

@@ -2,7 +2,7 @@ Quick reference									{#quickref}
 ===============
 [TOC]
 
 
-Here are some common conventions used throughout Banshee:
+Here are some common conventions used throughout bs::f:
 - It uses right handed coordinate system
 - Camera is looking towards the negative z axis
 - Screen/window space origin is located in the top left corner

+ 1 - 1
Documentation/Manuals/Native/rendererExtensions.md

@@ -2,7 +2,7 @@ Renderer extensions								{#rendererExtensions}
 ===============
 [TOC]
 
 
-Renderer is a system that processes all renderable objects in the scene, renders them, applies lighting and shadows, renders overlay elements such as GUI and applies post processing effects. It is the system that determines how your game looks (together with custom materials you might specify). In Banshee the renderer is implemented as a plugin, so you may create your own and fully customize the look of your game. Banshee also comes with a default renderer called "RenderBeast".
+Renderer is a system that processes all renderable objects in the scene, renders them, applies lighting and shadows, renders overlay elements such as GUI and applies post processing effects. It is the system that determines how your game looks (together with custom materials you might specify). In bs::f the renderer is implemented as a plugin, so you may create your own and fully customize the look of your game. bs::f also comes with a default renderer called "RenderBeast".
 
 
 In this chapter we'll show how to create extensions to the renderer, which are primarily useful when adding systems that need to perform rendering, but where you do not wish to completely replace existing renderer functionality, only add to it. Such systems might perform particle effect rendering, GUI overlays, custom 2D rendering and similar.
 
 

+ 7 - 7
Source/BansheeCore/RenderAPI/BsRenderAPI.cpp

@@ -146,18 +146,18 @@ namespace bs
 
 
 	namespace ct
 	{
-    RenderAPI::RenderAPI()
-        : mCurrentCapabilities(nullptr), mNumDevices(0)
-    {
-    }
+	RenderAPI::RenderAPI()
+		: mCurrentCapabilities(nullptr), mNumDevices(0)
+	{
+	}
 
 
-    RenderAPI::~RenderAPI()
-    {
+	RenderAPI::~RenderAPI()
+	{
 		// Base classes need to call virtual destroy_internal method instead of a destructor
 
 
 		bs_deleteN(mCurrentCapabilities, mNumDevices);
 		mCurrentCapabilities = nullptr;
-    }
+	}
 
 
 	SPtr<bs::RenderWindow> RenderAPI::initialize(const RENDER_WINDOW_DESC& primaryWindowDesc)
 	{

+ 4 - 1
Source/BansheeCore/Renderer/BsLightProbeVolume.h

@@ -90,7 +90,10 @@ namespace bs
 	public:
 		~LightProbeVolume();
 
 
-		/** Adds a new probe at the specified position and returns a handle to the probe. */
+		/** 
+		 * Adds a new probe at the specified position and returns a handle to the probe. The position is relative to
+		 * the volume origin.
+		 */
 		UINT32 addProbe(const Vector3& position);
 
 
 		/** Updates the position of the probe with the specified handle. */

+ 1 - 1
Source/BansheeCore/Renderer/BsRenderSettings.cpp

@@ -120,7 +120,7 @@ namespace bs
 
 
 	RenderSettings::RenderSettings()
 		: enableAutoExposure(true), enableTonemapping(true), enableFXAA(true), exposureScale(0.0f), gamma(2.2f)
-		, enableHDR(true), enableLighting(true), enableShadows(true), enableIndirectLighting(true), overlayOnly(false)
+		, enableHDR(true), enableLighting(true), enableShadows(true), enableIndirectLighting(false), overlayOnly(false)
 	{ }
 
 
 	RTTITypeBase* RenderSettings::getRTTIStatic()

+ 0 - 3
Source/BansheeCore/Resources/BsResources.cpp

@@ -697,9 +697,6 @@ namespace bs
 
 
 	void Resources::registerResourceManifest(const SPtr<ResourceManifest>& manifest)
 	{
-		if(manifest->getName() == "Default")
-			return;
-
 		auto findIter = std::find(mResourceManifests.begin(), mResourceManifests.end(), manifest);
 		if(findIter == mResourceManifests.end())
 			mResourceManifests.push_back(manifest);

+ 3 - 3
Source/BansheeCore/Scene/BsSceneManager.h

@@ -55,6 +55,9 @@ namespace bs
 		 */
 		void clearScene(bool forceAll = false);
 
 
+		/** Changes the root scene object. Any persistent objects will remain in the scene, now parented to the new root. */
+		void setRootNode(const HSceneObject& root);
+
 		/**
 		 * Changes the component state that globally determines which component callbacks are activated. Only affects
 		 * components that don't have the ComponentFlag::AlwaysRun flag set.
@@ -111,9 +114,6 @@ namespace bs
 		/**	Notifies the scene manager that a camera either became the main camera, or has stopped being main camera. */
 		void _notifyMainCameraStateChanged(const SPtr<Camera>& camera);
 
 
-		/** Changes the root scene object. Any persistent objects will remain in the scene, now parented to the new root. */
-		void _setRootNode(const HSceneObject& root);
-
 		/** Called every frame. Calls update methods on all scene objects and their components. */
 		void _update();