- /**
- \page Containers Container types
- Urho3D implements its own string type and template containers instead of using STL. The rationale for this consists of the following:
- - Increased performance in some cases, for example when using the PODVector class.
- - Reduced size of each string or vector instance compared to the MSVC STL implementations.
- - Reduced compile time.
- - Straightforward naming and implementation that aids in debugging and profiling.
- - Convenient member functions can be added, for example String::Split() or Vector::Compact().
- - Consistency with the rest of the classes, see \ref CodingConventions "Coding conventions".
- The classes in question are String, Vector, PODVector, List, Set, Map, HashSet and HashMap. PODVector is only to be used when the elements of the vector need no construction or destruction and can be moved with a block memory copy.
- The list, set and map classes use a fixed-size allocator internally. This can also be used by the application, either by using the procedural functions AllocatorInitialize(), AllocatorUninitialize(), AllocatorReserve() and AllocatorFree(), or through the template class Allocator.
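- As a brief illustration, here is a minimal sketch of typical string and container usage (assuming the Container library headers are available):
- \code
- String csv("one,two,three");
- Vector<String> parts = csv.Split(','); // Convenience member function of String
- PODVector<unsigned> lengths;           // POD elements: no construction/destruction needed
- for (unsigned i = 0; i < parts.Size(); ++i)
-     lengths.Push(parts[i].Length());
- HashMap<String, unsigned> lengthMap;   // Hashed key-value storage
- lengthMap["one"] = 3;
- \endcode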
- In script, the String class is exposed as it is. The template containers can not be directly exposed to script, but instead a template Array type exists, which behaves like a Vector, but does not expose iterators. In addition the VariantMap is available, which is a Map<ShortStringHash, Variant>.
- \page ObjectTypes %Object types and factories
- Classes that derive from Object contain type-identification, they can be created through object factories, and they can send and receive \ref Events "events". Examples of these are all Component, Resource and UIElement subclasses. To be able to be constructed by a factory, they need to have a constructor that takes a Context pointer as the only parameter.
- %Object factory registration and object creation through factories are directly accessible only in C++, not in script.
- The definition of an Object subclass must contain the OBJECT(className) macro. Type identification is available both as text (GetTypeName() or GetTypeNameStatic()) and as a 16-bit hash of the type name (GetType() or GetTypeStatic()).
- In addition the OBJECTTYPESTATIC(className) macro must appear in a .cpp file to actually define the type identification data. The reason for this instead of defining the data directly inside the OBJECT macro as function-static data is thread safety: function-static data is initialized on the first use, and if the first call to an object's GetTypeStatic() or GetTypeNameStatic() happened on several threads simultaneously, the results would be undefined.
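- As a minimal sketch based on the requirements above (the class name is hypothetical), an Object subclass declaration and definition could look like this:
- \code
- // MyObject.h
- class MyObject : public Object
- {
-     OBJECT(MyObject);
- public:
-     MyObject(Context* context);
- };
- // MyObject.cpp
- OBJECTTYPESTATIC(MyObject);
- MyObject::MyObject(Context* context) :
-     Object(context)
- {
- }
- \endcode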
- To register an object factory for a specific type, call the \ref Context::RegisterFactory "RegisterFactory()" template function on Context. You can get its pointer from any Object either via the \ref Object::context_ "context_" member variable, or by calling \ref Object::GetContext "GetContext()". An example:
- \code
- context_->RegisterFactory<MyClass>();
- \endcode
- To create an object using a factory, call Context's \ref Context::CreateObject "CreateObject()" function. This takes the 16-bit hash of the type name as a parameter. The created object (or null if there was no matching factory registered) will be returned inside a SharedPtr<Object>. For example:
- \code
- SharedPtr<Object> newComponent = context_->CreateObject(type);
- \endcode
- \page Subsystems Subsystems
- Any Object can be registered to the Context as a subsystem, by using the function \ref Context::RegisterSubsystem "RegisterSubsystem()". They can then be accessed by any other Object inside the same context by calling \ref Object::GetSubsystem "GetSubsystem()". Only one instance of each object type can exist as a subsystem.
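- For example, registering and later retrieving a custom subsystem in C++ (MySystem is a hypothetical Object subclass):
- \code
- context_->RegisterSubsystem(new MySystem(context_));
- // Later, from any Object living in the same context:
- MySystem* mySystem = GetSubsystem<MySystem>();
- \endcode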
- After Engine initialization, the following subsystems will always exist:
- - Time: manages frame updates, frame number and elapsed time counting, and controls the frequency of the operating system low-resolution timer.
- - WorkQueue: executes background tasks in worker threads.
- - FileSystem: provides directory operations.
- - Log: provides logging services.
- - ResourceCache: loads resources and keeps them cached for later access.
- - Network: provides UDP networking and scene replication.
- - Input: handles keyboard and mouse input. Will be inactive in headless mode.
- - UI: the graphical user interface. Will be inactive in headless mode.
- - Audio: provides sound output. Will be inactive if sound is disabled.
- - Engine: creates the other subsystems and controls the main loop iteration and framerate limiting.
- The following subsystems are optional, so GetSubsystem() may return null if they have not been created:
- - Profiler: Provides hierarchical function execution time measurement using the operating system performance counter. Exists if profiling has been compiled in (configurable from the root CMakeLists.txt).
- - Graphics: Manages the application window, the rendering context and resources. Exists if not in headless mode.
- - Renderer: Renders scenes in 3D and manages rendering quality settings. Exists if not in headless mode.
- - Script: Provides the AngelScript execution environment. Created by calling \ref Engine::InitializeScripting "InitializeScripting()".
- - Console: provides an interactive AngelScript console and log display. Created by calling \ref Engine::CreateConsole "CreateConsole()".
- - DebugHud: displays rendering mode information and statistics and profiling data. Created by calling \ref Engine::CreateDebugHud "CreateDebugHud()".
- In script, the subsystems are available through the following global properties:
- time, fileSystem, log, cache, network, input, ui, audio, engine, graphics, renderer, script, console, debugHud. Note that WorkQueue and Profiler are not available to script due to their low-level nature.
- \page Events Events
- The Urho3D event system allows for data transport and function invocation without the sender and receiver having to explicitly know of each other. It supports both broadcast and targeted events. Both the event sender and receiver must derive from Object. An event receiver must subscribe to each event type it wishes to receive: one can either subscribe to the event coming from any sender, or from a specific sender. The latter is useful for example when handling events from the user interface elements.
- Events themselves do not need to be registered. They are identified by 32-bit hashes of their names. Event parameters (the data payload) are optional and are contained inside a VariantMap, identified by 16-bit parameter name hashes. For the inbuilt Urho3D events, event type (E_UPDATE, E_KEYDOWN, E_MOUSEMOVE etc.) and parameter hashes (P_TIMESTEP, P_DX, P_DY etc.) are defined as constants inside include files such as CoreEvents.h or InputEvents.h.
- When subscribing to an event, a handler function must be specified. In C++ these must have the signature void HandleEvent(StringHash eventType, VariantMap& eventData). The HANDLER(className, function) macro helps in defining the required class-specific function pointers. For example:
- \code
- SubscribeToEvent(E_UPDATE, HANDLER(MyClass, MyEventHandler));
- \endcode
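- For completeness, here is a sketch of the handler function body matching the subscription above, and of subscribing to an event from a specific sender (okButton is a hypothetical UI element; E_RELEASED is assumed to be defined in UIEvents.h):
- \code
- void MyClass::MyEventHandler(StringHash eventType, VariantMap& eventData)
- {
-     // React to the event here
- }
- // Subscribe to the Released event coming from one specific button only
- SubscribeToEvent(okButton, E_RELEASED, HANDLER(MyClass, MyEventHandler));
- \endcode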
- In script events are identified by their string names instead of name hashes (though these are internally converted to hashes.) Script event handlers can either have the same signature as in C++, or a simplified signature void HandleEvent() when event type and parameters are not required. The same event subscription would look like:
- \code
- SubscribeToEvent("Update", "MyEventHandler");
- \endcode
- In C++ events must always be handled by a member function. In script procedural event handling is also possible; in this case the ScriptFile where the event handler function is located becomes the event receiver. See \ref Scripting "Scripting" for more details.
- Events can also be unsubscribed from. See \ref Object::UnsubscribeFromEvent "UnsubscribeFromEvent()" for details.
- To send an event, fill the event parameters (if necessary) and call \ref Object::SendEvent "SendEvent()". For example, this (in C++) is how the Engine subsystem sends the Update event on each frame. As no target object is specified, this is a broadcast event. Note how for the inbuilt Urho3D events, the parameter name hashes are always put inside a namespace (the event's name) to prevent name clashes:
- \code
- using namespace Update;
- VariantMap eventData;
- eventData[P_TIMESTEP] = timeStep_;
- SendEvent(E_UPDATE, eventData);
- \endcode
- In script event parameters, like event types, are referred to with strings, so the same code would look like:
- \code
- VariantMap eventData;
- eventData["TimeStep"] = timeStep;
- SendEvent("Update", eventData);
- \endcode
- A targeted event sent to a Node will be handled specially: it will be forwarded to all its components. This can be used to implement game-specific messaging, such as scoring and damage handling. The \ref Physics "physics simulation" uses this mechanism to inform the participating scene nodes of physics collisions.
- \page MainLoop Main loop and frame update
- The main loop iteration (also called a frame) is driven by the Engine. However, it is the program's (for example Urho3D.exe) responsibility to continuously loop this iteration. The iteration consists of the Engine calling the Time subsystem's \ref Time::BeginFrame "BeginFrame()" and \ref Time::EndFrame "EndFrame()" functions, and in between sending various update events. The event order is:
- - E_BEGINFRAME: signals the beginning of the new frame. Input and Network react to this to check for operating system window messages and arrived network packets.
- - E_UPDATE: application-wide logic update event. By default each active Scene reacts to this and triggers the scene update (more on this below.)
- - E_POSTUPDATE: application-wide logic post-update event. The UI subsystem updates its logic here.
- - E_RENDERUPDATE: Renderer updates its viewports here to prepare for rendering, and the UI generates render commands necessary to render the user interface.
- - E_POSTRENDERUPDATE: by default nothing hooks to this. This can be used to implement logic that requires the rendering views to be up-to-date (for example to do accurate raycasts.) Scenes may not be modified at this point (in particular, scene objects may not be deleted, or crashes may occur.)
- - E_ENDFRAME: signals the end of the frame. Before this, rendering the frame and measuring the next frame's timestep will have occurred.
- The update of each Scene causes further events to be sent:
- - E_SCENEUPDATE: variable timestep scene update. This is a good place to implement any scene logic that does not need to happen at a fixed step.
- - E_SCENESUBSYSTEMUPDATE: update scene-wide subsystems. Currently only the PhysicsWorld component listens to this, which causes it to step the physics simulation and send the following two events for each simulation step:
- - E_PHYSICSPRESTEP: called before the simulation iteration. Happens at a fixed rate (the physics FPS.) If fixed timestep logic updates are needed, this is a good event to listen to.
- - E_PHYSICSPOSTSTEP: called after the simulation iteration. Happens at the same rate as E_PHYSICSPRESTEP.
- - E_SMOOTHINGUPDATE: update SmoothedTransform components in network client scenes.
- - E_SCENEPOSTUPDATE: variable timestep scene post-update. ParticleEmitter and AnimationController update themselves as a response to this event.
- Variable timestep logic updates are preferable to fixed timestep, because they are only executed once per frame. In contrast, if the rendering framerate is low, several physics simulation steps will be performed on each frame to keep up the apparent passage of time, and if this also causes a lot of logic code to be executed for each step, the program may bog down further if the CPU can not handle the load. Note that the Engine's \ref Engine::SetMinFps "minimum FPS", by default 10, sets a hard cap for the timestep to prevent spiraling down to a complete halt; if exceeded, animation and physics will instead appear to slow down.
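- As an example of variable timestep logic, here is a sketch of an E_UPDATE handler that reads the frame's timestep from the event parameters:
- \code
- void MyClass::HandleUpdate(StringHash eventType, VariantMap& eventData)
- {
-     using namespace Update;
-     // Variable timestep of the current frame in seconds
-     float timeStep = eventData[P_TIMESTEP].GetFloat();
-     // ... perform per-frame logic scaled by timeStep here
- }
- \endcode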
- \page SceneModel %Scene model
- Urho3D's scene model can be described as a component-based scene graph. The Scene consists of a hierarchy of scene nodes, starting from the root node, which also represents the whole scene. Each Node has a 3D transform (position, rotation and scale), a name and an ID, and a freeform VariantMap for \ref Node::GetVars "user variables", but no other functionality.
- \section SceneModel_Components Components
- Rendering 3D objects, sound playback, physics and scripted logic updates are all enabled by creating different \ref Component "Components" into the nodes by calling \ref Node::CreateComponent "CreateComponent()". As with events, in C++ components are identified by type name hashes, and template forms of the component creation and retrieval functions exist for convenience. For example:
- \code
- Light* light = node->CreateComponent<Light>();
- \endcode
- In script, strings are used to identify component types instead, so the same code would look like:
- \code
- Light@ light = node.CreateComponent("Light");
- \endcode
- Because components are created using \ref ObjectTypes "object factories", a factory must be registered for each component type.
- Components created into the Scene itself have a special role: to implement scene-wide functionality. They should be created before all other components, and include the following:
- - Octree: implements spatial partitioning and accelerated visibility queries. Without this 3D objects can not be rendered.
- - PhysicsWorld: implements physics simulation. Physics components such as RigidBody or CollisionShape can not function properly without this.
- - DebugRenderer: implements debug geometry rendering.
- "Ordinary" components like Light, Camera or StaticModel should not be created directly into the Scene, but rather into child nodes.
- \section SceneModel_Identification Identification and scene hierarchy
- Unlike nodes, components do not have names; components inside the same node are only identified by their type and index in the node's component list, which is filled in creation order. See the various overloads of \ref Node::GetComponent "GetComponent()" or \ref Node::GetComponents "GetComponents()" for details.
- When created, both nodes and components get scene-global integer IDs. They can be queried from the Scene by using the functions \ref Scene::GetNodeByID "GetNodeByID()" and \ref Scene::GetComponentByID "GetComponentByID()". This is much faster than for example doing recursive name-based scene node queries.
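- A brief sketch of an ID-based query followed by component retrieval (nodeID is assumed to have been stored earlier):
- \code
- Node* node = scene->GetNodeByID(nodeID);
- if (node)
- {
-     Light* light = node->GetComponent<Light>();
-     // ... use the light here
- }
- \endcode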
- There is no inbuilt concept of an entity or a game object; rather it is up to the programmer to decide the node hierarchy, and in which nodes to place any scripted logic. Typically, free-moving objects in the 3D world would be created as children of the root node. Nodes can be created either with or without a name, see \ref Node::CreateChild "CreateChild()". Uniqueness of node names is not enforced.
- Whenever there is some hierarchical composition, it is recommended (and in fact necessary, because components do not have their own 3D transforms) to create a child node. For example, if a character was holding an object in his hand, the object should have its own node, which would be parented to the character's hand bone (also a Node.) The exception is the physics CollisionShape, which can be offset and rotated individually in relation to the node. See \ref Physics "Physics" for more details.
- %Scene nodes can be freely reparented. In contrast components are always created to the node they belong to, and can not be moved between nodes. Both child nodes and components are stored using SharedPtr containers; this means that detaching a child node from its parent or removing a component will also destroy it, if no other references to it exist. Both Node & Component provide the \ref Node::Remove "Remove()" function to accomplish this without having to go through the parent. Note that no operations on the node or component in question are safe after calling that function.
- It is also legal to create a Node that does not belong to a scene. This is particularly useful with cameras, because then the camera will not be serialized along with the actual scene, which is perhaps not always wanted.
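- For example, a camera node created outside the scene so that it is not saved along with it:
- \code
- SharedPtr<Node> cameraNode(new Node(context_));
- Camera* camera = cameraNode->CreateComponent<Camera>();
- \endcode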
- \section SceneModel_Update Scene updates and serialization
- A Scene can be either active or inactive (paused.) Active scenes will be automatically updated on each main loop iteration. See \ref Scene::SetActive "SetActive()".
- Scenes can be loaded and saved in either binary or XML format; see \ref Serialization "Serialization" for details.
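- A rough sketch of pausing and saving a scene (the File constructor and the Save() overload are assumed to match this engine version):
- \code
- scene->SetActive(false); // Pause scene updates
- File saveFile(context_, "MyScene.bin", FILE_WRITE);
- scene->Save(saveFile);   // Binary format; SaveXML() would produce XML instead
- \endcode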
- \section SceneModel_FurtherInformation Further information
- For more information on the component-based scene model, see for example http://cowboyprogramming.com/2007/01/05/evolve-your-heirachy/.
- \page Resources Resources
- Resources include most things in Urho3D that are loaded from mass storage during initialization or runtime:
- - Animation
- - Image
- - Model
- - Material
- - ScriptFile
- - Shader
- - Sound
- - Technique
- - Texture2D
- - TextureCube
- - XMLFile
- They are managed and loaded by the ResourceCache subsystem. Like with all other \ref ObjectTypes "typed objects", resource types are identified by 16-bit type name hashes (C++) or type names (script). An object factory must be registered for each resource type.
- The resources themselves are identified by their file paths, relative to the registered resource directories or \ref PackageFile "package files". By default, Urho3D.exe registers the resource directories Data and CoreData, or the packages Data.pak and CoreData.pak if they exist.
- If loading a resource fails, an error will be logged and a null pointer is returned.
- A typical C++ example of requesting a resource from the cache, in this case a texture for a UI element, is shown below. Note the use of the convenience template argument to specify the resource type instead of using the type hash.
- \code
- healthBar->SetTexture(GetSubsystem<ResourceCache>()->GetResource<Texture2D>("Textures/HealthBarBorder.png"));
- \endcode
- The same in script would look like this (note the use of a property instead of a setter function):
- \code
- healthBar.texture = cache.GetResource("Texture2D", "Textures/HealthBarBorder.png");
- \endcode
- Resources can also be created manually and stored to the resource cache as if they had been loaded from disk.
- Memory budgets can be set per resource type: if resources consume more memory than allowed, the oldest resources will be removed from the cache if not in use anymore. By default the memory budgets are set to unlimited.
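- For example, a hedged sketch of setting a per-type memory budget (the exact SetMemoryBudget() signature is assumed; the value is illustrative):
- \code
- ResourceCache* cache = GetSubsystem<ResourceCache>();
- cache->SetMemoryBudget(Texture2D::GetTypeStatic(), 64 * 1024 * 1024); // 64 MB for 2D textures
- \endcode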
- \page Scripting Scripting
- There are three ways to interact with the AngelScript language in Urho3D:
- \section Scripting_Immediate Immediate execution
- Immediate execution takes one line of AngelScript, compiles it, and executes. This is not recommended for anything that needs high performance, but can be used for example to implement a developer console. Call the Script subsystem's \ref Script::Execute "Execute()" function to use. For example:
- \code
- GetSubsystem<Script>()->Execute("Print(\"Hello World!\");");
- \endcode
- It may be useful to be able to access a specific scene or a script file while executing immediate script code. These can be set on the Script subsystem by calling \ref Script::SetDefaultScene "SetDefaultScene()" and \ref Script::SetDefaultScriptFile "SetDefaultScriptFile()".
- \section Scripting_Procedural Calling a function from a script file
- This requires a successfully loaded ScriptFile resource, whose \ref ScriptFile::Execute "Execute()" function will be used. To identify the function to be called, its full declaration is needed. Parameters are passed in a VariantVector. For example:
- \code
- ScriptFile* file = GetSubsystem<ResourceCache>()->GetResource<ScriptFile>("Scripts/MyScript.as");
- VariantVector parameters;
- parameters.Push(Variant(100)); // Add an int parameter
- file->Execute("void MyFunction(int)", parameters); // Execute
- \endcode
- \ref ScriptFile::Execute "Execute()" also has an overload which takes a function pointer instead of querying by declaration. Using a pointer is naturally faster than a query, but note that the query results are also stored to an internal cache, so repeated queries for the same declaration do not need to go to the AngelScript module level each time. Storing function pointers is risky in case the ScriptFile resource is reloaded, because then the pointers will be invalidated.
- \section Scripting_Object Instantiating a script object
- The component ScriptInstance can be used to instantiate a specific class from within a script file. After this the script object can respond to scene updates, \ref Events "events" and \ref Serialization "serialization" much like a component written in C++ would do, if it has the appropriate methods implemented. For example:
- \code
- ScriptInstance* instance = node->CreateComponent<ScriptInstance>();
- instance->CreateObject(GetSubsystem<ResourceCache>()->GetResource<ScriptFile>("Scripts/MyClass.as"), "MyClass");
- \endcode
- The class must implement the empty interface ScriptObject, so that the object can also be accessed from script using ScriptInstance's \ref ScriptInstance::GetScriptObject "GetScriptObject()" function.
- The following methods that implement the component behaviour will be checked for. None of them are required.
- - void Start()
- - void Stop()
- - void Update(float)
- - void PostUpdate(float)
- - void FixedUpdate(float)
- - void FixedPostUpdate(float)
- - void Save(Serializer&)
- - void Load(Deserializer&)
- - void ApplyAttributes()
- The update methods above correspond to the variable timestep scene update and post-update, and the fixed timestep physics world update and post-update. The application-wide update events are not handled by default.
- The Start() and Stop() methods do not have direct counterparts in C++ components. Start() is called just after the script object has been created. Stop() is called just before the script object is destroyed. This happens when the ScriptInstance is destroyed, or if the script class is changed.
- Subscribing to \ref Events "events" in script behaves differently depending on whether \ref Object::SubscribeToEvent "SubscribeToEvent()" is called from a script object's method, or from a procedural script function. If called from an object method, the ScriptInstance becomes the event receiver on the C++ side, and forwards the events to the script object. If called from a function, the ScriptFile will be the event receiver.
- The script object's active/inactive state can be controlled through the \ref ScriptInstance::SetActive "SetActive()" function. When inactive, the scripted update methods or event handlers will not be called. This can be used to reduce CPU load in a large or densely populated scene.
- There are shortcut methods on the script side for creating and accessing a node's script object: node.CreateScriptObject() and node.GetScriptObject() (alternatively, if the node has only one ScriptInstance, and a specific class is not needed, the node's scriptObject property can also be used.) These are not actual Node member functions on the C++ side. CreateScriptObject() takes the script file name (or alternatively, a ScriptFile object handle) and class name as parameters and creates a ScriptInstance component automatically, then creates the script object. For example:
- \code
- ScriptObject@ object = node.CreateScriptObject("Scripts/MyClass.as", "MyClass");
- \endcode
- \section Script_ScriptAPI The script API
- Most of the Urho3D classes are exposed to script; however, things that require low-level access or high performance (such as direct vertex buffer access) are not. Also, for scripting convenience, some things have been changed from the C++ API:
- - The template array and string classes are exposed as Array<type> and String.
- - Public member variables are exposed without the trailing underscore. For example x, y and z in Vector3.
- - Whenever only a single parameter is needed, setter and getter functions are replaced with properties. Such properties start with a lowercase letter. If an index parameter is needed, the property will be indexed. Indexed properties are in plural.
- - The element count property of arrays and other dynamic structures such as VariantMap and ResourceRefList is called "length", though the corresponding C++ function is usually Size().
- - Subsystems exist as global properties: time, fileSystem, log, cache, network, input, ui, audio, engine, graphics, renderer, script, console, debugHud.
- - Additional global properties exist for accessing the script object's node, the scene and the scene-wide components: node, scene, octree, physicsWorld, debugRenderer. When an object method is not executing, these are null. An exception: when the default scene for immediate execution has been set by calling \ref Script::SetDefaultScene "SetDefaultScene()", it is always available as "scene".
- - The currently executing script object's ScriptInstance component is available through the global property self.
- - The currently executing script file is available through the global property scriptFile.
- - The first script object created to a node is available as its scriptObject property.
- - Printing raw output to the log is simply called Print(). The rest of the logging functions are accessed by calling log.Debug(), log.Info(), log.Warning() and log.Error().
- - Functions that would take a StringHash or ShortStringHash parameter usually take a string instead. For example sending events, requesting resources and accessing components.
- - Most of the StringUtils functions have been exposed as methods of the string class. For example String.ToBool().
- - Template functions for getting components or resources by type are not supported. Instead automatic type casts are performed as necessary.
- \section Scripting_Limitations Limitations
- There are some complexities of the scripting system one has to watch out for:
- - During the execution of the script object's constructor, the object is not yet associated with the ScriptInstance, and therefore subscribing to events, or trying to access the node or scene will fail. The use of the constructor is best reserved for initializing member variables only.
- - There is a maximum allowed nesting level (currently 32) for execution that moves between C++ & AngelScript. Nested execution typically occurs if you send an event to another ScriptInstance from a scripted event handler. If the nesting level is exceeded, an error will be logged and the script code that would have required the extra nesting level will not be executed.
- - When the resource request for a particular ScriptFile is initially made, the script file and the files it includes are compiled into an AngelScript script module. Each script module has its own class hierarchy that is not usable from other script modules, unless the classes are declared shared. See AngelScript documentation for more details.
- - If a ScriptFile resource is reloaded, all the script objects created from it will be destroyed, then recreated. They will lose any stored state as their constructors and Start() methods will be run again. This is rarely useful when running an actual game, but may be helpful during development.
- \section Scripting_Modifications AngelScript modifications
- The following changes have been made to AngelScript in Urho3D:
- - For performance reasons and to guarantee immediate removal of expired objects, AngelScript garbage collection has been disabled for script classes and the Array type. This has the downside that circular references will not be detected. Therefore, whenever you have object handles in your script, think of them as if they were C++ shared pointers and avoid creating circular references with them.
- - %Object handle assignment can be done without the @ symbol if the object in question does not support value assignment. Exposed Urho3D C++ classes that derive from RefCounted never support value assignment. For example, when assigning the Model and Material of a StaticModel component:
- \code
- object.model = cache.GetResource("Model", "Models/Mushroom.mdl");
- object.material = cache.GetResource("Material", "Materials/Mushroom.xml");
- \endcode
- In unmodified AngelScript, this would have to be written as:
- \code
- @object.model = cache.GetResource("Model", "Models/Mushroom.mdl");
- @object.material = cache.GetResource("Material", "Materials/Mushroom.xml");
- \endcode
- \page Rendering Rendering
- Much of the rendering functionality in Urho3D is built on two subsystems, Graphics and Renderer, contained within the %Graphics library.
- \section Rendering_Graphics Graphics
- Graphics implements the low-level functionality:
- - Creating the window and the rendering context
- - Setting the screen mode
- - Keeping track of GPU resources
- - Keeping track of rendering context state (current rendertarget, vertex and index buffers, textures, shaders and renderstates)
- - Handling lost device
- - Performing primitive rendering operations
- Screen resolution, fullscreen/windowed, vertical sync and hardware multisampling level are all set at once by calling Graphics's \ref Graphics::SetMode "SetMode()" function.
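- For example, a minimal windowed mode change (the full overload, which also takes the fullscreen, vsync and multisampling parameters described above, depends on the engine version):
- \code
- GetSubsystem<Graphics>()->SetMode(1024, 768);
- \endcode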
- When setting the initial screen mode, Graphics does a few checks:
- - For Direct3D9, the supported shader model is checked. 2.0 is minimum, but 3.0 will be used if available. %Shader model 2.0 can be forced by calling \ref Graphics::SetForceSM2 "SetForceSM2()".
- - For OpenGL, version 2.0 with EXT_framebuffer_object, EXT_packed_depth_stencil, EXT_texture_compression_s3tc and EXT_texture_filter_anisotropic extensions is checked for.
- - Are hardware shadow maps supported? Both ATI & NVIDIA style shadow maps can be used. If neither is available, no shadows will be rendered.
- - Are light pre-pass and deferred rendering modes supported? These require sufficient multiple rendertarget support, and either R32F texture format or readable hardware depth.
- \section Rendering_Renderer Renderer
- Renderer implements the actual rendering of 3D views each frame, and controls global settings such as texture quality, material quality, specular lighting and shadow map base resolution.
- To render, it needs a Scene with an Octree component, and a Camera that does not necessarily have to belong to the scene. The octree stores all visible components (derived from Drawable) to allow querying for them in an accelerated manner. The scene, camera and screen rectangle to use are set with Renderer's \ref Renderer::SetViewport "SetViewport()" function.
- By default there is one viewport, but the number can be increased with the function \ref Renderer::SetNumViewports "SetNumViewports()". The viewport(s) should cover the entire screen, or otherwise hall-of-mirrors artifacts may occur. By specifying a zero screen rectangle the whole window will be used automatically. The viewports will be rendered in ascending order, so if you want, for example, a small overlay window on top of the main viewport, use viewport index 0 for the main view and 1 for the overlay.
- Viewports can have a chain of post-processing effects. See \ref Postprocessing "Post-processing" for more details.
- Forward, light pre-pass or deferred rendering can be chosen, see \ref Renderer::SetRenderMode "SetRenderMode()". Deferred rendering modes become advantageous once a large number of per-pixel lights affect each object, but their disadvantages are the lack of hardware multisampling and the inability to choose the lighting model per material. In place of multisample antialiasing, an FXAA post-processing edge filter can be used; see the TestScene script application for an example of how to use it.
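- For example, switching to light pre-pass rendering (the RenderMode enum value shown here is assumed for this engine version):
- \code
- GetSubsystem<Renderer>()->SetRenderMode(RENDER_PREPASS);
- \endcode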
- The steps for rendering each viewport on each frame are roughly the following:
- - Query the octree for visible objects and lights in the camera's view frustum.
- - Check the influence of each visible light on the objects. If the light casts shadows, query the octree for shadowcaster geometries.
- - Construct render operations (batches) for the visible objects.
- - Perform these render operations during the rendering step at the end of the frame.
- The rendering operations proceed in the following order:
- - Opaque geometry ambient pass, or G-buffer pass in deferred rendering modes.
- - Opaque geometry per-pixel lighting passes. For shadow casting lights, the shadow map is rendered first.
- - (%Light pre-pass only) Opaque geometry material pass, which renders the objects with accumulated per-pixel lighting.
- - Pre-alpha rendering pass for custom render ordering such as the skybox.
- - Transparent geometry rendering pass. Transparent, alpha-blended objects are sorted according to distance and rendered back-to-front to ensure correct blending.
- - Post-alpha rendering pass.
- \section Rendering_Drawable Rendering components
- The rendering-related components defined by the %Graphics library are:
- - Octree: spatial partitioning of Drawables for accelerated visibility queries. Needs to be created to the Scene (root node.)
- - Camera: describes a viewpoint for rendering, including projection parameters (FOV, near/far distance, perspective/orthographic)
- - Drawable: Base class for anything visible.
- - StaticModel: non-skinned geometry. Can LOD transition according to distance.
- - Skybox: a subclass of StaticModel that appears to always stay in place.
- - AnimatedModel: skinned geometry that can do skeletal and vertex morph animation.
- - AnimationController: drives AnimatedModel's animations forward automatically and controls animation fade-in/out.
- - BillboardSet: a group of camera-facing billboards, which can have varying sizes, rotations and texture coordinates.
- - ParticleEmitter: a subclass of BillboardSet that emits particle billboards.
- - Light: illuminates the scene. Can optionally cast shadows.
- - Zone: defines ambient light and fog settings for objects inside the zone volume.
- \section Rendering_Optimizations Optimizations
- The following techniques will be used to reduce the amount of CPU and GPU work when rendering. By default they are all on:
- - Software rasterized occlusion: after the octree has been queried for visible objects, the objects that are marked as occluders are rendered on the CPU to a small hierarchical-depth buffer, and it will be used to test the non-occluders for visibility. Use \ref Renderer::SetMaxOccluderTriangles "SetMaxOccluderTriangles()" and \ref Renderer::SetOccluderSizeThreshold "SetOccluderSizeThreshold()" to configure the occlusion rendering.
- - Hardware instancing (Direct3D9 SM3.0 only): rendering operations with the same geometry, material and light will be grouped together and performed as one draw call. Objects with a large amount of triangles will not be rendered as instanced, as that could actually be detrimental to performance. Use \ref Renderer::SetMaxInstanceTriangles "SetMaxInstanceTriangles()" to set the threshold. Note that even when instancing is not available, or the triangle count of objects is too large, they still benefit from the grouping, as render state only needs to be set once before rendering each group, reducing the CPU cost.
- - %Light stencil masking: in forward rendering, before objects lit by a spot or point light are re-rendered additively, the light's bounding shape is rendered to the stencil buffer to ensure pixels outside the light range are not processed.
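- As a rough sketch, the occlusion and instancing thresholds mentioned above might be configured like this (the values are illustrative; staticModel is a hypothetical Drawable):
- \code
- Renderer* renderer = GetSubsystem<Renderer>();
- renderer->SetMaxOccluderTriangles(5000); // CPU occluder triangle budget
- renderer->SetMaxInstanceTriangles(500);  // Instancing triangle threshold
- staticModel->SetOccluder(true);          // Mark a large static object as an occluder
- \endcode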
- Note that many more optimization opportunities are possible at the content level, for example using geometry & material LOD, grouping many static objects into one object and minimizing the number of subgeometries (submeshes) per object for fewer draw calls, using texture atlases to avoid render state changes, using compressed (and smaller) textures, and setting maximum draw distances for objects, lights and shadows.
- \section Rendering_Further Further details
- See also \ref Materials "Materials", \ref Lights "Lights and shadows", \ref SkeletalAnimation "Skeletal animation", \ref Particles "Particle systems", \ref Postprocessing "Post-processing", \ref Zones "Zones", and \ref AuxiliaryViews "Auxiliary views".
- See \ref RenderingModes "Rendering modes" for detailed discussion on the forward, light pre-pass and deferred rendering modes.
- See \ref APIDifferences "Differences between Direct3D9 and OpenGL" for what to watch out for when using the low-level rendering functionality directly.
- \page RenderingModes Rendering modes
- Urho3D implements forward, light pre-pass and deferred rendering modes. They differ in how per-pixel lighting is calculated for opaque objects; transparent objects always use forward rendering.
- \section RenderingModes_Forward Forward rendering
- Forward rendering begins with an ambient light pass for the objects; this also adds any per-vertex lights. Then, the objects are re-rendered for each per-pixel light affecting them (basic multipass rendering), up to the maximum per-pixel light count which is by default unlimited, but can be reduced with \ref Drawable::SetMaxLights "SetMaxLights()". The render operations are sorted by light, ie. render the effect of the first light on all affected objects first, then the second etc. If shadow maps are re-used (default on), a shadow casting light's shadow map will be updated immediately before rendering the lit objects. When shadow maps are not re-used, all shadow maps are updated first even before drawing the ambient pass.
- Materials can also define an optimization pass for forward rendering where the ambient light and the first per-pixel light are combined. This pass can not be used, however, if there are per-vertex lights affecting the object, or if the ambient light has a per-vertex gradient.
- \section RenderingModes_Prepass Light pre-pass rendering
- %Light pre-pass requires a minimum of two passes per object. First the normal, specular power, depth and lightmask (8 low bits only) of opaque objects are rendered to the following G-buffer. If the INTZ readable hardware depth-stencil texture format is available, the second color rendertarget will be omitted:
- - RT0: World-space normal and specular power (D3DFMT_A8R8G8B8)
- - RT1: Linear depth (D3DFMT_R32F)
- - DS: Hardware depth and lightmask (D3DFMT_D24S8 or INTZ)
- After the G-buffer is complete, light volumes (spot and point lights) or fullscreen quads (directional lights) will be rendered to a light accumulation buffer to calculate the diffuse and specular light at each opaque pixel. Specular light is stored as intensity only. Stencil compare (AND operation) with the 8 low bits of the light's lightmask will be used for light culling. Similarly to forward rendering, shadow maps will be updated before each light as necessary.
- Finally the opaque objects are re-rendered during the material pass, which combines ambient and vertex lighting with per-pixel lighting from the light accumulation buffer. After this rendering proceeds to the pre-alpha pass, transparent object rendering pass, and the post-alpha pass, just like forward rendering.
- \section RenderingModes_Deferred Deferred rendering
- Deferred rendering needs to render each opaque object only once to the G-buffer, but this rendering pass is much heavier than in light pre-pass rendering, as ambient, emissive and diffuse albedo information is also output at the same time. The G-buffer is the following, with the last color rendertarget omitted if hardware depth can be read:
- - RT0: Final rendertarget with ambient, per-vertex and emissive color (D3DFMT_X8R8G8B8)
- - RT1: Diffuse albedo and specular intensity (D3DFMT_A8R8G8B8)
- - RT2: World-space normal and specular power (D3DFMT_A8R8G8B8)
- - RT3: Linear depth (D3DFMT_R32F)
- - DS: Hardware depth and lightmask (D3DFMT_D24S8 or INTZ)
- After the G-buffer has been rendered, light volumes will be rendered into the final rendertarget to accumulate per-pixel lighting. As the material albedo is available, all lighting calculations are final and output both the diffuse and specular color at the same time. After light accumulation rendering proceeds to pre-alpha, transparent, and post-alpha passes, as in other rendering modes.
- \section RenderingModes_Comparision Advantages and disadvantages
- Whether forward or deferred rendering is more advantageous depends on the scene and lighting complexity.
- If the scene contains a large number of complex objects lit by multiple lights, forward rendering quickly increases the total draw call and vertex count due to re-rendering the objects for each light. However, light pre-pass and deferred rendering have a higher fixed cost due to the generation of the G-buffer. Also, in forward per-pixel lighting more calculations (such as light direction and shadow map coordinates) can be done at the vertex shader level, while in deferred all calculations need to happen per-pixel. This means that for a low light count, for example 1-2 per object, forward rendering will run faster based on the more efficient lighting calculations alone.
- Forward rendering makes it possible to use hardware multisampling and different shading models in different materials if needed, while neither is possible in the deferred modes. Also, only forward rendering allows calculating the material's diffuse and specular light response with full accuracy. %Light pre-pass rendering needs to reconstruct the light's specular color from the accumulated diffuse light color, which is inaccurate in case of overlapping lights. Deferred rendering, on the other hand, can not use the material's full specular color; it only stores a monochromatic intensity based on the green component into the G-buffer.
- %Light pre-pass rendering has a much more lightweight G-buffer pass, but it must render all opaque geometry twice. %Light accumulation in pre-pass mode is slightly faster than in deferred. Despite this, unless there is significant overdraw, in vertex-heavy scenes deferred rendering will likely be faster than light pre-pass.
- Finally note that due to OpenGL framebuffer object limitations an extra framebuffer blit has to happen at the end in both light pre-pass and deferred rendering, which costs some performance. Also, because multiple rendertargets on OpenGL must have the same format, an R32F texture can not be used for linear depth, but instead 24-bit depth is manually encoded and decoded into RGB channels.
- \page APIDifferences Differences between Direct3D9 and OpenGL
- - On OpenGL vertex attribute bindings depend on the currently set shaders. To ensure correct operation, first set the shaders, then the vertex buffers.
- - On Direct3D9 the depth-stencil surface can be equal or larger in size than the color rendertarget. On OpenGL the sizes must always match. Furthermore, OpenGL can not use the backbuffer depth-stencil surface when rendering to a texture. To overcome these limitations, Graphics will create correctly sized depth-stencil surfaces on demand whenever a texture is set as a color rendertarget, and a null depth-stencil is specified.
- - On Direct3D9 the viewport will be reset to full size when the first color rendertarget is changed. On OpenGL this does not happen. To ensure correct operation on both APIs, always use this sequence: first set the rendertargets, then the depth-stencil surface and finally the viewport.
- - On OpenGL modifying a texture will cause it to be momentarily set on the first texture unit. If another texture was set there, the assignment will be lost. Graphics performs a check to not assign textures redundantly, so it is safe and recommended to always set all needed textures before rendering.
- - Modifying an index buffer on OpenGL will similarly cause the existing index buffer assignment to be lost. Therefore, always set the vertex and index buffers before rendering.
- - %Shader resources are stored in different locations depending on the API: CoreData/Shaders/SM2 or CoreData/Shaders/SM3 for Direct3D9, and CoreData/Shaders/GLSL for OpenGL.
- - On OpenGL there is never a "device lost" condition, which would cause dynamic textures or vertex/index buffers to lose their contents. However, when the screen mode is changed, the context (along with all GPU resources) will be manually destroyed and recreated. This would be strictly necessary only when changing the multisampling mode, but as bugs may otherwise occur with some GPU drivers, it is best to do for any mode change.
- - At least for now, instancing is not supported for OpenGL. It still benefits from the instance group rendering loop, which only changes the model transform for each object with the same material and light, instead of setting the whole renderstate.
- - To ensure similar UV addressing for render-to-texture viewports on both APIs, on OpenGL texture viewports will be rendered upside down.
- Note that these differences only need to be observed when writing custom rendering functionality and accessing Graphics directly. When using Renderer and the Drawable components, they are taken care of automatically.
- \page Materials Materials
- Material and Technique resources define how to render 3D scene geometry. On the disk, they are XML data. By default, materials exist in the CoreData/Materials & Data/Materials subdirectories, and techniques exist in the CoreData/Techniques subdirectory.
- A material defines the textures, shader parameters and culling mode to use, and refers to techniques. A technique defines the actual rendering passes, the shaders to use in each, and all other rendering states such as depth test, depth write, and blending.
- A material definition looks like this:
- \code
- <material>
- <technique name="TechniqueName" quality="q" loddistance="d" sm3="true|false" />
- <texture unit="diffuse|normal|specular|detail|environment|emissive" name="TextureName" />
- <texture ... />
- <parameter name="name" value="x y z w" />
- <parameter ... />
- <cull value="cw|ccw|none" />
- <shadowcull value="cw|ccw|none" />
- </material>
- \endcode
- %Technique quality levels are specified from 0 (low) to 2 (high). When rendering, the highest available technique that does not exceed the Renderer's material quality setting will be chosen, see \ref Renderer::SetMaterialQuality "SetMaterialQuality()". If a technique requires SM3.0-only shaders, it can be marked as such by the "sm3" attribute.
- When a material defines several techniques for different LOD levels and quality settings, they must appear in a specific order:
- - Most distant & highest quality
- - ...
- - Most distant & lowest quality
- - Second most distant & highest quality
- - ...
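- For example, a material serving two distances and two quality levels might list its techniques like this (the technique names are hypothetical):
- \code
- <material>
-     <technique name="Techniques/DiffLowDetail.xml" quality="0" loddistance="50" />
-     <technique name="Techniques/DiffNormal.xml" quality="2" loddistance="0" />
-     <technique name="Techniques/Diff.xml" quality="0" loddistance="0" />
- </material>
- \endcode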
- %Material shader parameters can be floats or vectors up to 4 components. Matrix parameters are not supported.
- Default culling mode is counterclockwise. The shadowcull element specifies the culling mode to use in the shadow pass.
- \section Materials_Textures Material textures
- Diffuse maps specify the surface color in the RGB channels. Optionally they can use the alpha channel for blending and alpha testing. They should preferably be compressed to DXT1 (no alpha or 1-bit alpha) or DXT5 (smooth alpha) format.
- Normal maps encode the tangent-space surface normal for normal mapping. They need to be stored as xGxR, ie. Y-component in the green channel, and X-component in the alpha (Z will be reconstructed in the pixel shader.) This encoding lends itself well to DXT5 compression. To convert normal maps to this format, you can use AMD's The Compressonator utility, see http://developer.amd.com/tools/compressonator/pages/default.aspx
- Specular maps use only the G channel to specify specular intensity. DXT1 format should suit these well.
- \section Materials_Techniques Techniques and passes
- A technique definition looks like this:
- \code
- <technique>
- <pass name="base|litbase|light|prealpha|postalpha|prepass|material|deferred|shadow" vs="VertexShaderName" ps="PixelShaderName"
- alphatest="true|false" blend="replace|add|multiply|alpha|addalpha|premulalpha|invdestalpha"
- depthtest="always|equal|less|lessequal|greater|greaterequal" depthwrite="true|false" />
- <pass ... />
- <pass ... />
- </technique>
- \endcode
- The purposes of the different passes are:
- - base: Renders ambient light, per-vertex lights and fog.
- - litbase: Renders the first per-pixel light, ambient light and fog. This is an optional pass for optimization.
- - light: Renders one per-pixel light's contribution additively.
- - prealpha: Custom rendering pass after opaque geometry. Can be used for example to render the skybox.
- - postalpha: Custom rendering pass after transparent geometry.
- - prepass: %Light pre-pass only - renders normals, specular power and depth to the G-buffer.
- - material: %Light pre-pass only - renders opaque geometry final color by combining ambient light, per-vertex lights and per-pixel light accumulation.
- - deferred: Deferred rendering only - renders ambient light and per-vertex lights to the output rendertarget, and diffuse albedo, normals, specular intensity + power and depth to the G-buffer.
- - shadow: Renders depth only for shadow map generation.
- By default draw calls within passes are sorted by render state, but transparent base and light passes, as well as the postalpha pass, are sorted by distance back to front.
- Note that the technique does not need to enumerate shaders used for different geometry types (non-skinned, skinned, instanced, billboard) and different per-vertex and per-pixel light combinations. Instead specific hardcoded shader variations are assumed to exist. See the files Ambient.xml and ForwardLit.xml in either SourceAssets/HLSLShaders or SourceAssets/GLSLShaders to see which variations are required.
- The optional "litbase" pass reduces draw call count by combining ambient lighting with the first per-pixel light affecting an object. However, it has intentional limitations to not require too many shader permutations: there must be no vertex lights affecting the object, and the ambient lighting can not have a gradient. In case of excessive overdraw, it is possibly better not to define it, but instead allow the base pass (which is computationally very lightweight) to run first, initializing the Z buffer for later passes.
- \page Lights Lights and shadows
- Lights in Urho3D can be directional, point, or spot lights. Shadow mapping is supported for all light types.
- A directional light's position has no effect, as it is assumed to be infinitely far away; only its rotation matters. It casts orthographically projected shadows. To increase the shadow quality, cascaded shadow mapping (splitting the view into several shadow maps along the Z-axis) can be used.
- Point lights are spherical in shape. When a point light casts shadows, it will be internally split into 6 spot lights with a 90 degree FOV each. This is very expensive rendering-wise, so shadow casting point lights should be used sparingly.
- Spot lights have FOV & aspect ratio values like cameras to define the shape of the light cone.
- Both point and spot lights use an attenuation ramp texture to determine how the intensity varies with distance. In addition they have a shape texture, 2D for spot lights, and an optional cube texture for point lights. It is important that the spot light's shape texture has black at the borders, and has mipmapping disabled, otherwise there will be "bleeding" artifacts at the edges of the light cone.
- \section Lights_LightCulling Light culling
- When occlusion is used, a light will automatically be culled if its bounding box is fully behind an occluder. However, directional lights have an infinite bounding box, and can not be culled this way.
- It is possible to limit which objects are affected by each light, by calling \ref Drawable::SetLightMask "SetLightMask()" on both the light and the objects. The lightmasks of the light and objects are ANDed to check whether the light should have effect: the light will only illuminate an object if the result is nonzero. By default objects and lights have all bits set in their lightmask, thus passing this test always.
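- For example, a sketch of putting a light and its intended receivers into the same lightmask group (the object names are hypothetical):
- \code
- light->SetLightMask(0x1);         // Light illuminates group 1 only
- insideObject->SetLightMask(0x1);  // Nonzero AND result: will be lit
- outsideObject->SetLightMask(0x2); // Zero AND result: not lit by this light
- \endcode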
- \ref Zone "Zones" can also be used for light culling. When an object is inside a zone, its lightmask will be ANDed with the zone's lightmask before testing it against the lights' lightmasks. Using this mechanism, objects can change their accepted light set dynamically as they move through the scene.
- Care must be taken when doing light culling with lightmasks, because they easily create situations where a light's influence is cut off unnaturally. However, they can be helpful in preventing light spill into undesired areas, for example lights inside one room bleeding into another, without having to resort to shadow-casting lights.
- In light pre-pass and deferred rendering, light culling happens by writing the objects' lightmasks to the stencil buffer during G-buffer rendering, and comparing the stencil buffer to the light's light mask when rendering light volumes. In this case lightmasks are limited to the low 8 bits only.
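- For illustration of the lightmask test described above, a minimal sketch (the light and drawable pointers are hypothetical):
- \code
- // The light only illuminates drawables whose lightmask shares at least one bit with its own.
- light->SetLightMask(0x2);          // light belongs to "room 2"
- roomObject->SetLightMask(0x2);     // lit: 0x2 & 0x2 != 0
- corridorObject->SetLightMask(0x1); // not lit: 0x2 & 0x1 == 0
- \endcode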
- \section Lights_ShadowedLights Shadowed lights
- Shadow rendering is easily the most complex aspect of using lights, and therefore a wide range of per-light parameters exists for controlling the shadows:
- - BiasParameters: define constant and slope-scaled depth bias values for preventing self-shadowing artifacts. In practice, need to be determined experimentally. Orthographic (directional) and projective (point and spot) shadows may require rather different bias values. Another way of fighting self-shadowing issues is to render shadowcaster backfaces, see \ref Rendering_Materials "Materials".
- - CascadeParameters: these have effect only for directional lights. They specify the far clip distance of each of the cascaded shadow map splits (maximum 4), and the fade start point relative to the maximum shadow range. Unused splits can be set to far clip 0.
- - FocusParameters: these have effect for directional and spot lights, and control techniques to increase shadow map resolution. They consist of focus enable flag (allows focusing the shadow camera on the visible shadow casters & receivers), nonuniform scale enable flag (allows better resolution), automatic size reduction flag (reduces shadow map resolution when the light is far away), and quantization & minimum size parameters for the shadow camera view.
- Additionally there are shadow fade distance, shadow intensity, shadow resolution and shadow near/far ratio parameters:
- - If both shadow distance and shadow fade distance are greater than zero, shadows start to fade at the shadow fade distance, and vanish completely at the shadow distance.
- - Shadow intensity defines how dark the shadows are, between 0.0 (maximum darkness, the default) and 1.0 (fully lit.)
- - The shadow resolution parameter scales the global shadow map size set in Renderer to determine the actual shadow map size. Maximum is 1.0 (full size) and minimum is 0.125 (one eighth size.) Choose according to the size and importance of the light; smaller shadow maps will be less performance hungry.
- - The shadow near/far ratio controls shadow camera near clip distance for point & spot lights. The default ratio is 0.002, which means a light with range 100 would have its shadow camera near plane set at the distance of 0.2. Set this as high as you can for better shadow depth resolution, but note that the bias parameters will likely have to be adjusted as well.
- Finally, there are global settings for the shadow map base resolution and shadow map depth (16 or 24 bit) & filtering quality (1 or 4 samples) in Renderer.
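- A rough sketch of tuning these parameters on a directional light follows; the constructor argument order of BiasParameters and CascadeParameters, as well as the setter names, are assumptions and should be checked against the Light header.
- \code
- // Sketch: shadow tuning for a directional light (argument order assumed).
- light->SetShadowBias(BiasParameters(0.0001f, 0.5f));                          // constant + slope-scaled bias
- light->SetShadowCascade(CascadeParameters(10.0f, 50.0f, 200.0f, 0.0f, 0.8f)); // 3 splits used, fade starts at 80% of shadow range
- light->SetShadowIntensity(0.25f);  // 0.0 = maximum darkness
- light->SetShadowResolution(0.5f);  // half of the global shadow map size
- \endcode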
- \section Lights_ShadowCulling Shadow culling
- Similarly to light culling with lightmasks, shadowmasks can be used to select which objects should cast shadows with respect to each light. See \ref Drawable::SetShadowMask "SetShadowMask()". A potential shadow caster's shadow mask will be ANDed with the light's lightmask to see if it should be rendered to the light's shadow map. Also, when an object is inside a zone, its shadowmask will be ANDed with the zone's shadowmask as well. By default all bits are set in the shadowmask.
- For an example of shadow culling, imagine a house (which itself is a shadow caster) containing several objects inside, and a shadowed directional light shining in from the windows. In that case shadow map rendering can be avoided for objects already in shadow by clearing the respective bit from their shadowmasks.
- \section Lights_ShadowMapReuse Shadow map reuse
- The Renderer can be configured either to reuse shadow maps, or not. Reuse is the default; use \ref Renderer::SetReuseShadowMaps "SetReuseShadowMaps()" to change.
- When reuse is enabled, only one shadow texture of each shadow map size needs to be reserved, and shadow maps are rendered "on the fly" before rendering a single shadowed light's contribution onto opaque geometry. This has the downside that shadow maps are no longer available during transparent geometry rendering, so transparent objects will not receive shadows.
- When reuse is disabled, all shadow maps are rendered before the actual scene rendering. Now multiple shadow textures need to be reserved based on the number of simultaneous shadow casting lights. See the function \ref Renderer::SetNumShadowMaps "SetNumShadowMaps()". If there are not enough shadow textures, they will be assigned to the closest/brightest lights, and the rest will be rendered unshadowed. Now more texture memory is needed, but the advantage is that also transparent objects can receive shadows.
- \page SkeletalAnimation Skeletal animation
- There are two ways to play skeletal animations. Either manually, by adding or removing animation states to the AnimatedModel, and advancing their time positions & weights, see \ref AnimatedModel::AddAnimationState "AddAnimationState()", \ref AnimatedModel::RemoveAnimationState "RemoveAnimationState()", \ref AnimationState::AddTime "AddTime()" and \ref AnimationState::SetWeight "SetWeight()". Alternatively the helper component AnimationController can be used by adding it into the same Node as the AnimatedModel, and using its functions, such as \ref AnimationController::Play "Play()" and \ref AnimationController::Stop "Stop()". AnimationController will advance the animations automatically during scene update. It also enables automatic network synchronization of animations, which the AnimatedModel does not do on its own.
- Note that AnimationController does not by default stop non-looping animations automatically once they reach the end, so their final pose will stay in effect. Rather they must either be stopped manually, or the \ref AnimationController::SetAutoFade "SetAutoFade()" function can be used to make them automatically fade out once reaching the end.
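- A minimal sketch of the AnimationController route; the Play() argument list (name, layer, looped, fade-in time) and the animation resource name are assumptions:
- \code
- // Sketch: play a looping walk animation through AnimationController.
- AnimatedModel* model = node->CreateComponent<AnimatedModel>();
- AnimationController* ctrl = node->CreateComponent<AnimationController>();
- ctrl->Play("Models/Walk.ani", 0, true, 0.2f);  // layer 0, looped, 0.2 second fade-in
- \endcode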
- \section SkeletalAnimation_Blending Animation blending
- %Animation blending uses the concept of numbered layers. Layer numbers are unsigned 8-bit integers, and the active \ref AnimationState "AnimationStates" on each layer are processed in order from the lowest layer to the highest. As animations are applied by lerp-blending between absolute bone transforms, the effect is that the higher layer numbers have higher priority, as they will remain in effect last.
- By default an Animation is played back by using all the available bone tracks. However an animation can be only partially applied by setting a start bone, see \ref AnimationState::SetStartBone "SetStartBone()". Once set, the bone tracks will be applied hierarchically starting from the start bone. For example, to apply an animation only to a bipedal character's upper body, which is typically parented to the spine bone, one could set the spine as the start bone.
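- For illustration, manually blending an upper-body animation on a higher layer could look roughly like this; the AnimationState calls and the bone name are assumptions:
- \code
- // Sketch: blend an upper-body animation on layer 1 on top of a full-body animation on layer 0.
- AnimationState* wave = model->AddAnimationState(waveAnimation);    // waveAnimation is a hypothetical Animation resource
- wave->SetLayer(1);                                                 // higher layer = applied last
- wave->SetStartBone(model->GetSkeleton().GetBone("Spine"));         // only affect the spine and its children
- wave->SetWeight(1.0f);
- wave->AddTime(timeStep);                                           // advance manually each frame
- \endcode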
- \page Particles %Particle systems
- The ParticleEmitter class derives from BillboardSet to implement a particle system that updates automatically.
- The particle system's properties can be set through a XML description file, see \ref ParticleEmitter::LoadParameters "LoadParameters()".
- Most of the parameters can take either a single value, or minimum and maximum values to allow for random variation. See below for all supported parameters:
- \code
- <particleemitter>
- <material name="MaterialName" />
- <updateinvisible enable="true|false" />
- <relative enable="true|false" />
- <scaled enable="true|false" />
- <sorted enable="true|false" />
- <emittertype value="point|box|sphere" />
- <emittersize value="x y z" />
- <direction min="x1 y1 z1" max="x2 y2 z2" />
- <constantforce value="x y z" />
- <dampingforce value="x" />
- <activetime value="t" />
- <inactivetime value="t" />
- <interval min="t1" max="t2" />
- <particlesize min="x1 y1" max="x2 y2" />
- <timetolive min="t1" max="t2" />
- <velocity min="x1" max="x2" />
- <rotation min="x1" max="x2" />
- <rotationspeed min="x1" max="x2" />
- <sizedelta add="x" mul="y" />
- <color value="r g b a" />
- <colorfade color="r g b a" time="t" />
- </particleemitter>
- \endcode
- Note: zero active or inactive time period means infinite. Instead of defining a single color element, several colorfade elements can be defined in time order to describe how the particles change color over time.
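- Creating an emitter and loading such a description file could look roughly as follows; the resource name is hypothetical, and LoadParameters() is assumed here to take an XMLFile pointer:
- \code
- // Sketch: create a particle emitter and load its parameters from XML.
- Node* emitterNode = scene->CreateChild("Smoke");
- ParticleEmitter* emitter = emitterNode->CreateComponent<ParticleEmitter>();
- emitter->LoadParameters(cache->GetResource<XMLFile>("Particles/Smoke.xml"));
- \endcode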
- \page Zones Zones
- A Zone controls ambient lighting and fogging. Each geometry object determines the zone it is inside (by testing against the zone's oriented bounding box) and uses that zone's ambient light color, fog color and fog start/end distance for rendering. For the case of multiple overlapping zones, zones also have an integer priority value, and objects will choose the highest priority zone they touch.
- The viewport will be initially cleared to the fog color of the zone found at the camera's far clip distance. If no zone is found either for the far clip or an object, a default zone with black ambient and fog color will be used.
- Zones have two special flags: override mode and ambient gradient. If the camera is inside a zone with override mode enabled, all rendered objects will use that zone's ambient and fog settings, instead of the zone they belong to. This can be used for example to implement an underwater effect. When ambient gradient mode is enabled, the zone's own ambient color value is not used; instead the zone looks for the two highest-priority neighbor zones that touch it at the minimum and maximum Z faces of its oriented bounding box: any objects inside will then get a per-vertex ambient color fade between the neighbor zones' ambient colors.
- Zones also define a lightmask and a shadowmask (with all bits set by default.) An object's final lightmask for light culling is determined by ANDing the object lightmask and the zone lightmask. The final shadowmask is also calculated in the same way.
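- A minimal sketch of setting up a zone for a room; the setter names are assumed from the Zone API:
- \code
- // Sketch: a zone covering a room with its own ambient light and fog.
- Node* zoneNode = scene->CreateChild("RoomZone");
- Zone* zone = zoneNode->CreateComponent<Zone>();
- zone->SetBoundingBox(BoundingBox(Vector3(-10.0f, 0.0f, -10.0f), Vector3(10.0f, 5.0f, 10.0f)));
- zone->SetAmbientColor(Color(0.2f, 0.2f, 0.25f));
- zone->SetFogColor(Color(0.4f, 0.4f, 0.5f));
- zone->SetFogStart(20.0f);
- zone->SetFogEnd(100.0f);
- zone->SetPriority(1);   // wins over lower-priority overlapping zones
- \endcode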
- \page Postprocessing Post-processing
- After a viewport's 3D scene content is rendered, post-processing effects can be applied to it. These are viewport-sized quads rendered with configurable vertex and pixel shaders, shader parameters and textures.
- The Viewport contains a vector of PostProcess pointers, into which effects can be added, see \ref Viewport::AddPostProcess "AddPostProcess()" or \ref Viewport::InsertPostProcess "InsertPostProcess()". They are applied in order from first to last. Each post-processing effect can consist of a number of passes, represented by the class PostProcessPass. The last pass of the last effect will be rendered to the destination rendertarget, while the passes before it will be rendered to an internal ping-pong buffer.
- In addition to configuring programmatically, the PostProcess can be configured with an XML file; use \ref PostProcess::LoadParameters "LoadParameters()" to specify the XML file to use. The format is the following:
- \code
- <postprocess>
- <rendertarget name="RenderTargetName" size="x y" | sizedivisor="x y" format="rgb|rgba|float" filter="true|false" />
- <parameter name="GlobalShaderParameterName" value="x y z w" />
- <pass vs="VertexShaderName" ps="PixelShaderName" output="viewport|RenderTargetName" />
- <texture unit="diffuse|normal|specular|detail|environment|emissive" name="viewport|RenderTargetName|TextureName" />
- <parameter name="ShaderParameterName" value="x y z w" />
- </pass>
- </postprocess>
- \endcode
- The additional rendertargets defined by a post-process effect are temporary and allocated on-demand from the Renderer. They can either specify an absolute width and height, or a divisor for the viewport size (for example sizedivisor="2 2" would divide the viewport dimensions by two.) A pass can output either into a defined rendertarget or into the viewport. The current contents of the viewport (either the scene render or a previous post-process pass) can also be read as a texture; this will always use the other half of the ping-pong buffer to avoid sampling the texture being rendered.
- %Shader parameters can be defined either as global for all passes, or as pass-specific. In addition to the user-defined shader parameters, the shader parameters GBufferOffsets and GBufferInvSize will always be set according to the viewport coordinates and the destination rendertarget inverse size, so that the GetScreenPos() shader function can operate correctly, and adjacent pixels can be sampled. See the EdgeFilter shader for an example. For the additional rendertargets the shader parameters <RenderTargetName>Offsets and <RenderTargetName>InvSize will be set (if they exist) to allow to sample also these rendertargets properly. In this case the "offsets" parameter represents the half-pixel UV offset needed to sample whole pixels on Direct3D9 only; on OpenGL it will be zero.
- Note that when hardware multisampling is used in conjunction with post-processing, the multisampled backbuffer will first be resolved to the ping-pong buffer before rendering the post-process passes. This has a small performance cost.
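- As a rough sketch, loading an effect and appending it to a viewport could look as follows; the PostProcess construction, the LoadParameters() argument type and the resource name are assumptions:
- \code
- // Sketch: load a post-process effect description and add it to the viewport's effect chain.
- SharedPtr<PostProcess> bloom(new PostProcess(context_));
- bloom->LoadParameters(cache->GetResource<XMLFile>("PostProcess/Bloom.xml"));
- viewport->AddPostProcess(bloom);
- \endcode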
- \page AuxiliaryViews Auxiliary views
- Auxiliary views are viewports defined into a RenderSurface. These will be rendered whenever the texture containing the surface is visible, and are typically used to implement effects such as reflections. The texture in question must have been created in rendertarget mode, see Texture's \ref Texture2D::SetSize "SetSize()" function.
- The viewport is not assigned directly to the texture because of cube map support: a renderable cube map has 6 render surfaces, and done this way, a different camera could be assigned to each.
- A "backup texture" can be assigned to the rendertarget texture: because it is illegal to sample a texture that is also being simultaneously rendered to (in cases where the texture becomes "recursively" visible in the auxiliary view), the backup texture can be used to specify which texture should be used in place instead.
- Rendering detailed auxiliary views can easily have a large performance impact. Some things you can do for optimization with the auxiliary view camera:
- - Set the far clip distance as small as possible.
- - Set the camera's viewmask to for example VIEW_REFLECTION, then clear that viewmask bit from objects you don't need rendered.
- - Use the camera's \ref Camera::SetViewOverrideFlags "SetViewOverrideFlags()" function to disable shadows, to disable occlusion, or force the lowest material quality.
- \page Input %Input
- The Input subsystem provides keyboard and mouse input via both a polled interface and events. It is always instantiated, even in headless mode, but is active only once the application window has been created. Once active, the subsystem takes over the application mouse cursor. It will be hidden, so the UI should be used to render a software cursor if necessary.
- The input events include:
- - E_MOUSEBUTTONUP: a mouse button has been released.
- - E_MOUSEBUTTONDOWN: a mouse button has been pressed.
- - E_MOUSEMOVE: the mouse has been moved.
- - E_MOUSEWHEEL: the mouse wheel has been moved.
- - E_KEYUP: a key has been released.
- - E_KEYDOWN: a key has been pressed.
- - E_CHAR: translation of a keypress to Latin-1 charset for text entry. This is currently the only way to get translated key input.
- The input polling API differentiates between the initiation of a key/mouse button press, and holding the key or button down. \ref Input::GetKeyPress "GetKeyPress()" and \ref Input::GetMouseButtonPress "GetMouseButtonPress()" return true only for one frame (the initiation) while \ref Input::GetKeyDown "GetKeyDown()" and \ref Input::GetMouseButtonDown "GetMouseButtonDown()" return true as long as the key or button is held down.
- From the input subsystem you can also query whether the application is active/inactive, or minimized.
- In script, the polling API is accessed via properties: input.keyDown[], input.keyPress[], input.mouseButtonDown[], input.mouseButtonPress[], input.mouseMove.
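- A short sketch of the polled API in a frame update; the key and mouse button constant names are assumed, and MoveForward(), Fire() and yaw_ are hypothetical application code:
- \code
- // Sketch: polled keyboard and mouse input during the frame update.
- Input* input = GetSubsystem<Input>();
- if (input->GetKeyDown(KEY_W))
-     MoveForward(timeStep);                 // held down: act every frame
- if (input->GetMouseButtonPress(MOUSEB_LEFT))
-     Fire();                                // only on the frame the button was pressed
- IntVector2 mouseMove = input->GetMouseMove();
- yaw_ += 0.1f * mouseMove.x_;               // turn with mouse movement
- \endcode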
- \page Audio %Audio
- The Audio subsystem implements an audio output stream using DirectSound. DirectSound requires a window handle, so sound can not be played back before the application window has been opened. Once playing, the following operations are supported:
- - Playing raw audio, Ogg Vorbis or WAV Sound resources using the SoundSource component. This allows manual stereo panning of mono sounds; stereo sounds will be output with their original stereo mix.
- - Playing the above sound formats in pseudo-3D using the SoundSource3D component. It has stereo positioning and distance attenuation, but does not (at least yet) filter the sound depending on the direction.
- For pseudo-3D positional sounds, the listener position and rotation have to be updated by calling \ref Audio::SetListenerPosition "SetListenerPosition()" and \ref Audio::SetListenerRotation "SetListenerRotation()".
- The output is software mixed for an unlimited amount of simultaneous sounds. Ogg Vorbis sounds are decoded on the fly, and decoding them can be memory- and CPU-intensive, so WAV files are recommended when a large number of short sound effects need to be played.
- For purposes of volume control, each SoundSource is classified into one of three categories:
- - %Sound effects
- - Music
- - Voice
- A master gain category also exists that affects the final output level. To control the category volumes, use \ref Audio::SetMasterGain "SetMasterGain()".
- The SoundSource components support automatic removal from the node they belong to, once playback is finished. To use, call \ref SoundSource::SetAutoRemove "SetAutoRemove()" on them. This may be useful when a game object plays several "fire and forget" sound effects.
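- A minimal sketch of a fire-and-forget sound effect; the resource name is hypothetical, and the Play() overload taking a Sound pointer is assumed:
- \code
- // Sketch: play a one-shot sound effect on a scene node.
- Sound* sound = cache->GetResource<Sound>("Sounds/Explosion.wav");
- SoundSource* source = node->CreateComponent<SoundSource>();
- source->SetAutoRemove(true);   // remove the component once playback finishes
- source->Play(sound);
- \endcode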
- \section Audio_Parameters Sound parameters
- A standard WAV file can not tell whether it should loop, and raw audio does not contain any header information. Parameters for the Sound resource can optionally be specified through an XML file that has the same name as the sound, but with the .xml extension. Possible elements and attributes are described below:
- \code
- <sound>
- <format frequency="x" sixteenbit="true|false" stereo="true|false" />
- <loop enable="true|false" start="x" end="x" />
- </sound>
- \endcode
- The frequency is in Hz, and loop start and end are bytes from the start of audio data. If a loop is enabled without specifying the start and end, it is assumed to be the whole sound. Ogg Vorbis compressed sounds do not support specifying the loop range, only whether whole sound looping is enabled or disabled.
- The Audio subsystem is always instantiated, but in headless mode it is not active. In headless mode the playback of sounds is simulated, taking the sound length and frequency into account. This allows basing logic on whether a specific sound is still playing or not, even in server code.
- \page Physics Physics
- The %Physics library in Urho3D implements rigid body physics simulation using the Bullet library.
- To use, a PhysicsWorld component must first be created to the Scene.
- The physics simulation has its own fixed update rate, which by default is 60Hz. When the rendering framerate is higher than the physics update rate, physics motion is interpolated so that it always appears smooth. The update rate can be changed with the \ref PhysicsWorld::SetFps "SetFps()" function. The physics update rate also determines the frequency of fixed timestep scene logic updates.
- The other physics components are:
- - RigidBody: a physics object instance. Its parameters include mass, linear/angular velocities, friction and restitution.
- - CollisionShape: defines physics collision geometry. The supported shapes are box, sphere, cylinder, capsule, cone, triangle mesh and convex hull.
- - Joint: connects two RigidBodies together, or one RigidBody to a static point in the world. Currently ball and hinge joints are supported.
- Both a RigidBody and at least one CollisionShape component must exist in a scene node for it to behave physically (a collision shape by itself does nothing.) Several collision shapes may exist in the same node to create compound shapes. An offset position and rotation relative to the node's transform can be specified for each. Triangle mesh and convex hull geometries require specifying a Model resource and the LOD level to use.
- CollisionShape provides two APIs for defining the collision geometry: either set individual properties such as the \ref CollisionShape::SetShapeType "shape type" or \ref CollisionShape::SetSize "size", or specify the shape type and all its properties at once, see for example \ref CollisionShape::SetBox "SetBox()", \ref CollisionShape::SetCapsule "SetCapsule()" or \ref CollisionShape::SetTriangleMesh "SetTriangleMesh()".
- RigidBodies can be either static or moving. A body is static if its mass is 0, and moving if the mass is greater than 0.
- The collision behaviour of a rigid body is controlled by several variables. First, the collision layer and mask define which other objects to collide with: see \ref RigidBody::SetCollisionLayer "SetCollisionLayer()" and \ref RigidBody::SetCollisionMask "SetCollisionMask()". By default a rigid body is on layer 1; the layer will be ANDed with the other body's collision mask to see if the collision should be reported. A rigid body can also be set to \ref RigidBody::SetPhantom "phantom mode" to only report collisions without actually applying collision forces. Finally, the \ref RigidBody::SetFriction "friction" and \ref RigidBody::SetRestitution "restitution" coefficients (between 0 - 1) control how kinetic energy is transferred in the collisions.
- By default rigid bodies can move and rotate about all 3 coordinate axes when forces are applied. To limit the movement, use \ref RigidBody::SetLinearFactor "SetLinearFactor()" and \ref RigidBody::SetAngularFactor "SetAngularFactor()" and set the axes you wish to use to 1 and those you do not wish to use to 0. For example moving humanoid characters are often represented by a capsule shape: to ensure they stay upright and only rotate when you explicitly set the rotation in code, set the angular factor to 0, 0, 0.
- To prevent tunneling of a fast moving rigid body through obstacles, continuous collision detection can be used. It approximates the object as a swept sphere, but has a performance cost, so it should be used only when necessary. Call \ref RigidBody::SetCcdRadius "SetCcdRadius()" and \ref RigidBody::SetCcdMotionThreshold "SetCcdMotionThreshold()" with non-zero values to enable. To prevent false collisions, the body's actual collision shape should completely contain a sphere of the CCD radius. The motion threshold is the required motion per simulation step for CCD to activate: for example a box with size 1 should have a motion threshold of 1 as well.
- All physics calculations are performed in world space. Nodes containing a RigidBody component should be parented to the Scene (root node) to ensure correct operation.
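- A minimal sketch of a physics setup; the exact setter signatures (for example the SetBox() arguments) are assumptions:
- \code
- // Sketch: physics world plus one dynamic box body parented to the scene root.
- scene->CreateComponent<PhysicsWorld>();
- Node* boxNode = scene->CreateChild("Box");
- RigidBody* body = boxNode->CreateComponent<RigidBody>();
- body->SetMass(1.0f);                       // mass > 0 makes the body moving instead of static
- body->SetFriction(0.75f);
- CollisionShape* shape = boxNode->CreateComponent<CollisionShape>();
- shape->SetBox(Vector3(1.0f, 1.0f, 1.0f));  // a 1x1x1 box centered at the node
- \endcode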
- The physics world sends 3 types of events during its update step:
- - E_PHYSICSPRESTEP before the simulation is stepped.
- - E_PHYSICSCOLLISION (and E_NODECOLLISION to the participating scene nodes) for each collision during the simulation step.
- - E_PHYSICSPOSTSTEP after the simulation has been stepped.
- Note that if the rendering framerate is high, the physics might not be stepped at all on each frame: in that case those events will not be sent.
- \page UI User interface
- Urho3D implements a simple, hierarchical user interface system based on rectangular elements. The elements provided by default are:
- - BorderImage: a texture image with an optional border
- - Button: a pushbutton
- - CheckBox: a button that can be toggled on/off
- - Cursor: a mouse cursor
- - DropDownList: shows a vertical list of items (optionally scrollable) as a popup
- - LineEdit: a single-line text editor
- - ListView: shows a scrollable vertical list of items
- - Menu: a button which can show a popup element
- - ScrollBar: a slider with back and forward buttons
- - ScrollView: a scrollable view of child elements
- - Slider: a horizontal or vertical slider bar
- - Text: static text that can be multiline
- - UIElement: container for other elements, renders nothing by itself
- - Window: a movable and resizable window
- The root UI element can be queried from the UI subsystem. It is an empty canvas (UIElement) as large as the application window, into which other elements can be added.
- Elements are added into each other similarly as scene nodes, using the \ref UIElement::AddChild "AddChild()" and \ref UIElement::RemoveChild "RemoveChild()" functions. Each UI element has also a \ref UIElement::GetVars "user variables" VariantMap for storing custom data.
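- As a sketch, building a small piece of UI in code could look like this; the GetRoot() accessor and the element constructors taking a Context pointer are assumptions, and font/style setup is omitted:
- \code
- // Sketch: create a button with a text label and add it to the UI root element.
- UIElement* root = GetSubsystem<UI>()->GetRoot();
- Button* button = new Button(context_);
- button->SetSize(100, 30);
- button->SetAlignment(HA_CENTER, VA_CENTER);
- Text* label = new Text(context_);
- label->SetText("Quit");
- button->AddChild(label);
- root->AddChild(button);
- \endcode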
- \section UI_Defining Defining UI elements in XML
- Each UI element knows how to load its properties from an XML file. There are two distinct use cases for this: either defining just the UI element style and leaving the actual position and dimensions to be filled in later, or fully defining a set of UI elements. For an example of defining element styles, see the file Data/UI/DefaultStyle.xml.
- The function \ref UI::LoadLayout "LoadLayout()" in UI will take an XML file and instantiate the elements defined in it. To be valid XML, there should be one root UI element. An optional style XML file can be specified; the idea is to first read the element's style from that file, then fill in the rest from the actual layout XML file. This way the layout file can be relatively simple, as the majority of the data is already defined.
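- A usage sketch; the file names are hypothetical, and LoadLayout() is assumed to take the layout and style XMLFile pointers and return the created root element:
- \code
- // Sketch: instantiate a layout, applying styles from a separate style file.
- XMLFile* layout = cache->GetResource<XMLFile>("UI/MyWindow.xml");
- XMLFile* style = cache->GetResource<XMLFile>("UI/DefaultStyle.xml");
- SharedPtr<UIElement> window = GetSubsystem<UI>()->LoadLayout(layout, style);
- GetSubsystem<UI>()->GetRoot()->AddChild(window);
- \endcode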
- The XML data for each UI element follows. Everything is optional and defaults will be used if missing. Note the redundant ways in which to define element size. Also note the element class hierarchy; for example a Button derives from BorderImage, and all elements derive from UIElement. See the comments in the elements' header files for descriptions of each property.
- \subsection UI_UIElement UIElement
- \code
- <element name="ElementName" type="UIElement" >
- <position value="x y" />
- <size value="x y" />
- <width value="x" />
- <height value="y" />
- <minsize value="x y" />
- <minwidth value="x" />
- <minheight value="y" />
- <maxsize value="x y" />
- <maxwidth value="x" />
- <maxheight value="y" />
- <fixedsize value="x y" />
- <fixedwidth value="x" />
- <fixedheight value="y" />
- <alignment horizontal="left|center|right" vertical="top|center|bottom" />
- <clipborder value="l t r b" />
- <priority value="p" />
- <opacity value="o" />
- <color value="r g b a" | topleft="r g b a" topright="r g b a" bottomleft="r g b a" bottomright="r g b a" />
- <bringtofront enable="true|false" />
- <bringtoback enable="true|false" />
- <clipchildren enable="true|false" />
- <enabled enable="true|false" />
- <selected enable="true|false" />
- <visible enable="true|false" />
- <focusmode value="notfocusable|resetfocus|focusable|focusabledefocusable" />
- <layout mode="free|horizontal|vertical" spacing="s" border="l t r b" />
- <vars>
- <variant name="n" type="t" value="v" />
- ...
- </vars>
- </element>
- \endcode
- \subsection UI_BorderImage BorderImage
- \code
- <element type="BorderImage">
- <texture name="TextureName" />
- <imagerect value="l t r b" />
- <border value="l t r b" />
- <hoveroffset value="x y" />
- </element>
- \endcode
- \subsection UI_Button Button
- \code
- <element type="Button">
- <pressedoffset value="x y" />
- <labeloffset value="x y" />
- <repeat delay="d" rate="r" />
- </element>
- \endcode
- \subsection UI_Checkbox CheckBox
- \code
- <element type="Checkbox">
- <checkedoffset value="x y" />
- </element>
- \endcode
- \subsection UI_Cursor Cursor
- \code
- <element type="Cursor">
- <shape name="normal|resizevertical|resizediagonal_topright|resizehorizontal|resizediagonal_topleft|acceptdrop|rejectdrop"
- texture="TextureName" imagerect="l t r b" hotspot="x y" />
- ...
- </element>
- \endcode
- \subsection UI_Menu Menu
- If a popup element is specified, it will be searched for by name from the UI element hierarchy.
- \code
- <element type="Menu">
- <popup name="ElementName" />
- <popupoffset value="x y" />
- </element>
- \endcode
- \subsection UI_Text Text
- \code
- <element type="Text">
- <font name="FontName" size="s" />
- <text value="..." />
- <textalignment value="left|center|right" />
- <rowspacing value="s" />
- <selection start="s" length="l" />
- <selectioncolor value="r g b a" />
- <hovercolor value="r g b a" />
- </element>
- \endcode
- \subsection UI_Window Window
- \code
- <element type="Window">
- <resizeborder value="l t r b" />
- <movable enable="true|false" />
- <resizable enable="true|false" />
- </element>
- \endcode
- \subsection UI_DropDownList DropDownList
- The styles of the listview, popup and placeholder sub-elements can be specified within the respective XML elements. The listview can be pre-filled by specifying popup items; they will be searched for by name from the UI element hierarchy.
- \code
- <element type="DropDownList">
- <selection value="s" />
- <resizepopup enable="true|false" />
- <listview />
- <popup />
- <placeholder />
- <popupitem name="ElementName" />
- ...
- </element>
- \endcode
- \subsection UI_LineEdit LineEdit
- The style of the cursor sub-element can be specified with the "cursor" XML element.
- \code
- <element type="LineEdit">
- <maxlength value="l" />
- <cursormovable enable="true|false" />
- <textselectable enable="true|false" />
- <textcopyable enable="true|false" />
- <text value="..." />
- <cursorposition value="p" />
- <cursorblinkrate value="r" />
- <echocharacter value="c" />
- <cursor />
- </element>
- \endcode
- \subsection UI_Slider Slider
- The style of the knob sub-element can be specified with the "knob" XML element.
- \code
- <element type="Slider">
- <orientation value="horizontal|vertical" />
- <range max="m" value="v" />
- <knob />
- </element>
- \endcode
- \subsection UI_ScrollBar ScrollBar
- The styles of the back button, forward button and the slider can be specified with the respective XML elements. Note the buttons' nonstandard imagerect element, which specifies the image to use for both a horizontal and a vertical button.
- \code
- <element type="ScrollBar">
- <orientation value="horizontal|vertical" />
- <range max="m" value="v" />
- <scrollstep value="s" />
- <stepfactor value="f" />
- <backbutton>
- <imagerect horizontal="l t r b" vertical="l t r b" />
- </backbutton>
- <forwardbutton>
- <imagerect horizontal="l t r b" vertical="l t r b" />
- </forwardbutton>
- <slider />
- </element>
- \endcode
- \subsection UI_ScrollView ScrollView
- The styles of the horizontal and vertical scrollbars can be specified with the respective XML elements. If a content element is specified, it will be searched for by name from the UI element hierarchy.
- \code
- <element type="ScrollView">
- <viewposition value="x y" />
- <scrollstep value="s" />
- <pagestep value="p" />
- <horizontalscrollbar />
- <verticalscrollbar />
- <contentelement name="ElementName" />
- </element>
- \endcode
- \subsection UI_ListView ListView
- \code
- <element type="ListView">
- <selection value="s" />
- <highlight value="never|focus|always" />
- <multiselect enable="true|false" />
- <hierarchy enable="true|false" />
- <clearselection enable="true|false" />
- <doubleclickinterval value="i" />
- </element>
- \endcode
- \section UI_Layouts UI element layout
- By default %UI elements operate in a "free" layout mode, where child elements' positions can be specified relative to any of the parent element corners, but they are not automatically positioned or resized.
- To create automatically adjusting layouts, the layout mode can be switched to either "horizontal" or "vertical". Now the child elements will be positioned left to right or top to bottom, based on the order in which they were added. They will preferably be resized to fit the parent element, taking into account their minimum and maximum sizes, but if that is not possible, the parent element will be resized instead.
- Left, top, right & bottom border widths and spacing between elements can also be specified for the layout. A grid layout is not directly supported, but it can be manually created with a horizontal layout inside a vertical layout, or vice versa.
- \page Serialization Serialization
- Classes that derive from Serializable can perform automatic serialization to binary or XML format by defining \ref AttributeInfo "attributes". Attributes are stored to the Context per class. %Scene load/save and network replication are both implemented by having the Node and Component classes derive from Serializable.
- The supported attribute types are all those supported by Variant. Attributes can either define a direct memory offset into the object, or setter & getter functions. Zero-based enumerations are also supported, so that the enum values can be stored as text into XML files instead of just numbers. For editing, the attributes also have human-readable names.
- To implement side effects to attributes, for example that a Node needs to dirty its world transform whenever the local transform changes, the default attribute access functions in Serializable can be overridden. See \ref Serializable::OnSetAttribute "OnSetAttribute()" and \ref Serializable::OnGetAttribute "OnGetAttribute()".
- Each attribute can have a combination of the following flags:
- - AM_FILE: Is used for file serialization (load/save.)
- - AM_NET: Is used for network replication.
- - AM_LATESTDATA: Frequently changing data for network replication, where only the latest values matter. Used for motion and animation.
- - AM_NOEDIT: Is an internal attribute and is not to be shown for editing.
- The default flags are AM_FILE and AM_NET.
- \page Network Networking
- The Network library provides reliable and unreliable UDP messaging using kNet. A server can be created that listens for incoming connections, and client connections can be made to the server. After connecting, code running on the server can assign the client into a scene to enable scene replication, provided that when connecting, the client specified a blank scene for receiving the updates.
- %Scene replication is one-directional: the server always has authority and sends scene updates to the client at a fixed update rate, by default 30 FPS. The client responds by sending controls updates (buttons, yaw and pitch + possible extra data) also at a fixed rate.
- Bidirectional communication between the server and the client can happen either using raw network messages, which are binary-serialized data, or remote events, which operate like ordinary events, but are processed on the receiving end only. Code on the server can send messages or remote events either to one client, all clients assigned into a particular scene, or to all connected clients. In contrast the client can only send messages or remote events to the server, not directly to other clients.
- Note that if a particular networked application does not need scene replication, network messages (and remote events that are not targeted) can also be transmitted without assigning the client to a scene. The Chat example does just that: it does not create a scene either on the server or the client.
- \section Network_Connecting Connecting to a server
- Starting the server and connecting to it both happen through the Network subsystem. See \ref Network::StartServer "StartServer()" and \ref Network::Connect "Connect()". A UDP port must be chosen; the examples use the port 1234.
- Note the scene (to be used for replication) and identity VariantMap supplied as parameters when connecting. The identity data can contain for example the user name or credentials; it is completely application-specified. The identity data is sent right after connecting and causes the E_CLIENTIDENTITY event to be sent on the server when received. By subscribing to this event, server code can examine incoming connections and accept or deny them. The default is to accept all connections.
- After connecting successfully, client code can get the Connection object representing the server connection, see \ref Network::GetServerConnection "GetServerConnection()". Likewise, on the server a Connection object will be created for each connected client, and these can be iterated through. This object is used to send network messages or remote events to the remote peer, to assign the client into a scene (on the server only), or to disconnect.
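- A rough sketch of both ends; the Connect() argument order (address, port, scene, identity) is an assumption:
- \code
- // Server side: start listening on UDP port 1234.
- GetSubsystem<Network>()->StartServer(1234);
- // Client side: connect, supplying a blank scene for replication and optional identity data.
- VariantMap identity;   // could carry e.g. a user name; left empty here
- GetSubsystem<Network>()->Connect("localhost", 1234, scene, identity);
- \endcode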
- \section Network_Replication Scene replication
- %Network replication of scene content has been implemented in a straightforward manner, using \ref Serialization "attributes". Nodes and components that have not been created in local mode - see the CreateMode parameter of \ref Node::CreateChild "CreateChild()" or \ref Node::CreateComponent "CreateComponent()" - will be automatically replicated. Note that a replicated component created into a local node will not be replicated, as the node's locality is checked first.
- The CreateMode translates into two different node and component ID ranges - replicated ID's range from 0x1 to 0xffffff, while local ID's range from 0x1000000 to 0xffffffff. This means there is a maximum of 16777215 replicated nodes or components in a scene.
- If the scene was originally loaded from a file on the server, the client will also load the scene from the same file first. In this case all predefined, static objects such as the world geometry should be defined as local nodes, so that they are not needlessly retransmitted through the network during the initial update, and do not exhaust the more limited replicated ID range.
- The server can be made to transmit needed resource \ref PackageFile "packages" to the client. This requires attaching the package files to the Scene by calling \ref Scene::AddRequiredPackageFile "AddRequiredPackageFile()". On the client, a cache directory for the packages must be chosen before they can be received: see \ref Network::SetPackageCacheDir "SetPackageCacheDir()".
- There are some things to watch out for:
- - After connecting to a server, the client should not create, update or remove non-local nodes or components on its own. However, to create client-side special effects and such, the client can freely manipulate local nodes.
- - A node's \ref Node::GetVars "user variables" VariantMap will be automatically replicated on a per-variable basis. This can be useful in transmitting data shared by several components, for example the player's score or health.
- - To implement interpolation, exponential smoothing of the nodes' rendering transforms is enabled on the client. It can be controlled by two properties of the Scene, the smoothing constant and the snap threshold. Snap threshold is the distance between network updates which, if exceeded, causes the node to immediately snap to the end position, instead of moving smoothly. See \ref Scene::SetSmoothingConstant "SetSmoothingConstant()" and \ref Scene::SetSnapThreshold "SetSnapThreshold()".
- - Position and rotation are Node attributes, while linear and angular velocities are RigidBody attributes. To cut down on the needed network bandwidth the physics components can be created as local on the server: in this case the client will not see them at all, and will only interpolate motion based on the node's transform changes. Replicating the actual physics components allows the client to extrapolate using its own physics simulation, and to also perform collision detection, though always non-authoritatively.
- - By default the physics simulation also performs interpolation to enable smooth motion when the rendering framerate is higher than the physics FPS. This should be disabled on the server scene to ensure that the clients do not receive interpolated and therefore possibly non-physical positions and rotations. See \ref PhysicsWorld::SetInterpolation "SetInterpolation()".
- - AnimatedModel does not replicate animation by itself. Rather, AnimationController will replicate its command state (such as "fade this animation in, play that animation at 1.5x speed.") To turn off animation replication, create the AnimationController as local. To ensure that also the first animation update will be received correctly, always create the AnimatedModel component first, then the AnimationController.
- - Networked attributes can either be in delta update or latest data mode. Delta updates are small incremental changes and must be applied in order, which may cause increased latency if there is a stall in network message delivery, e.g. due to packet loss. High volume data such as position, rotation and velocities are transmitted as latest data, which does not need ordering; instead this mode simply discards any old data received out of order. Note that node and component creation (when initial attributes need to be sent) and removal can also be considered as delta updates and are therefore applied in order.
- - To avoid going through the whole scene when sending network updates, nodes and components explicitly mark themselves for update when necessary. When writing your own replicated C++ components, call \ref Component::MarkNetworkUpdate "MarkNetworkUpdate()" in member functions that modify any networked attribute.
- - The server update logic orders replication messages so that parent nodes are created and updated before their children. Remote events are queued and only sent after the replication update to ensure that if they target a newly created node, it will already exist on the receiving end. However, it is also possible to specify unordered transmission for a remote event, in which case that guarantee does not hold.
- - Nodes have the concept of the \ref Node::SetOwner "owner connection" (for example the player that is controlling a specific game object), which can be set in server code. This property is not replicated to the client. Messages or remote events can be used instead to tell the players what object they control.
- - At least for now, there is no built-in client-side prediction.
- \section Network_InterestManagement Interest management
- %Scene replication includes a simple, distance-based interest management mechanism for reducing bandwidth use. To use, create the NetworkPriority component to a Node you wish to apply interest management to. The component can be created as local, as it is not important to the clients.
- This component has three parameters for controlling the update frequency: \ref NetworkPriority::SetBasePriority "base priority", \ref NetworkPriority::SetDistanceFactor "distance factor", and \ref NetworkPriority::SetMinPriority "minimum priority".
- A current priority value is calculated on each server update as "base priority - distance factor * distance." Additionally, it can never go lower than the minimum priority. This value is then added to an update accumulator. Whenever the update accumulator reaches 100.0, the attribute changes to the node and its components are sent, and the accumulator is reset.
- The default values are base priority 100.0, distance factor 0.0, and minimum priority 0.0. This means that by default an update is always sent (which is also the case if the node has no NetworkPriority component.) Additionally, there is a rule that the node's owner connection always receives updates at full frequency. This rule can be controlled by calling \ref NetworkPriority::SetAlwaysUpdateOwner "SetAlwaysUpdateOwner()".
- Calculating the distance requires the client to tell its current observer position (typically, either the camera's or the player character's world position.) This is accomplished by the client code calling \ref Connection::SetPosition "SetPosition()" on the server connection.
- For now, creation and removal of nodes is always sent immediately, without consulting interest management. This is based on the assumption that nodes' motion updates consume the most bandwidth.
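- For illustration, a sketch of the priority setup; the CreateComponent() overload taking a CreateMode is an assumption. With these values an object 100 units away accumulates 100 - 0.5 * 100 = 50 priority per server update, so its attribute changes are sent on every second update:
- \code
- // Sketch: distance-based update throttling for one node.
- NetworkPriority* priority = node->CreateComponent<NetworkPriority>(LOCAL);
- priority->SetBasePriority(100.0f);
- priority->SetDistanceFactor(0.5f);
- priority->SetMinPriority(10.0f);   // never drop below one update per ten server updates
- \endcode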
- \section Network_Controls Client controls update
- The Controls structure is used to send controls information from the client to the server, by default also at 30 FPS. This includes held down buttons, which is an application-defined 32-bit bitfield, floating point yaw and pitch, and possible extra data (for example the currently selected weapon) stored within a VariantMap.
- It is up to the client code to keep the controls up-to-date, by calling \ref Connection::SetControls "SetControls()" on the server connection. The event E_NETWORKUPDATE will be sent to remind of the impending update, and the event E_NETWORKUPDATESENT will be sent after the update. The controls can then be inspected on the server side by calling \ref Connection::GetControls "GetControls()".
- The controls update message also includes the client's observer position for interest management.
- \section Network_Messages Raw network messages
- All network messages have an integer ID. The first ID you can use for custom messages is 22 (lower ID's are either reserved for kNet's or the %Network library's internal use.) Messages can be sent either unreliably or reliably, in-order or unordered. The data payload is simply raw binary data that can be crafted by using for example VectorBuffer.
- To send a message to a Connection, use its \ref Connection::SendMessage "SendMessage()" function. On the server, messages can also be broadcast to all client connections by calling the \ref Network::BroadcastMessage "BroadcastMessage()" function.
- When a message is received, and it is not an internal protocol message, it will be forwarded as the E_NETWORKMESSAGE event. See the Chat example for details of sending and receiving.
- For high performance, consider using unordered messages, because for in-order messages there is only a single channel within the connection, and all previous in-order messages must arrive before a new one can be processed.
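- A minimal sketch of sending a custom message; the message ID and payload are application-defined, and the SendMessage() argument order (ID, reliable, in-order, data) is an assumption:
- \code
- // Sketch: craft a binary payload and send it reliably and in-order.
- static const int MSG_MYMESSAGE = 22;   // first ID free for application use
- VectorBuffer msg;
- msg.WriteString("Hello");
- connection->SendMessage(MSG_MYMESSAGE, true, true, msg);
- \endcode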
- \section Network_RemoteEvents Remote events
- A remote event consists of its event type (name hash), a flag that tells whether it is to be sent in-order or unordered, and the event data VariantMap. It can optionally target a specific Node in the receiver's scene. This is different from ordinary events, which can optionally target any Object within the execution context.
- To send a remote event to a Connection, use its \ref Connection::SendRemoteEvent "SendRemoteEvent()" function. To broadcast remote events to several connections at once (server only), use Network's \ref Network::BroadcastRemoteEvent "BroadcastRemoteEvent()" function.
- For safety, allowed remote event types should be registered so that a client can not for example trigger an internal render update event on the server. See \ref Network::RegisterRemoteEvent "RegisterRemoteEvent()". Similarly to file paths, as long as no remote event types are registered, all are allowed.
- Like with ordinary events, in script event types are strings instead of name hashes for convenience.
- \page Multithreading Multithreading
- Urho3D uses a task-based multithreading model. The WorkQueue subsystem can be supplied with tasks described by the WorkItem structure, by calling \ref WorkQueue::AddWorkItem "AddWorkItem()". These will be executed in background worker threads. The function \ref WorkQueue::Complete "Complete()" will complete all currently pending tasks, and execute them also in the main thread to make them finish faster.
- On single-core systems no worker threads will be created, and tasks are immediately processed by the main thread instead. In the presence of more cores, a worker thread will be created for each hardware core except one, which is reserved for the main thread. Hyperthreaded cores are not included, as creating worker threads also for them leads to unpredictable extra synchronization overhead.
- The work items include a function pointer to call, with the signature "void WorkFunction(const WorkItem* item, unsigned threadIndex)." The thread index ranges from 0 to n, where 0 represents the main thread and n is the number of worker threads created. Its function is to aid in splitting work into per-thread data structures that need no locking. The work item also contains three void pointers: start, end and aux, which can be used to describe a range of sub-work items, and an auxiliary data structure, which may for example be the object that originally queued the work.
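- A sketch of a work function and its submission; the WorkItem member names (workFunction_, start_, end_) and the AddWorkItem()/Complete() signatures are assumptions, and positions is a hypothetical container of Vector3:
- \code
- // Sketch: process a range of vectors in a worker thread.
- void OffsetPositions(const WorkItem* item, unsigned threadIndex)
- {
-     Vector3* start = reinterpret_cast<Vector3*>(item->start_);
-     Vector3* end = reinterpret_cast<Vector3*>(item->end_);
-     for (Vector3* i = start; i != end; ++i)
-         *i += Vector3(0.0f, 1.0f, 0.0f);   // example per-element work
- }
- // Submission, e.g. from the main thread's update handler:
- WorkQueue* queue = GetSubsystem<WorkQueue>();
- WorkItem item;
- item.workFunction_ = OffsetPositions;
- item.start_ = &positions[0];
- item.end_ = &positions[0] + positions.Size();
- queue->AddWorkItem(item);
- queue->Complete();   // finish all pending work items before continuing
- \endcode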
- Multithreading is so far not exposed to scripts, and is currently used only in a limited manner: to speed up the preparation of rendering views, including lit object and shadow caster queries, occlusion tests and particle system, animation and skinning updates. Raycasts into the Octree are also threaded, but physics raycasts are not.
- Note that as the Profiler currently manages only a single hierarchy tree, profiling blocks may only appear in main thread code, not in the work functions.
- \page Tools Tools
- \section Tools_AssetImporter AssetImporter
- Tool that loads various 3D formats supported by Open Asset Import Library (http://assimp.sourceforge.net/) and saves Urho3D model, animation, material and scene files out of them. For the list of supported formats, look at http://assimp.sourceforge.net/main_features_formats.html.
- Usage:
- \verbatim
- AssetImporter <command> <input file> <output file> [options]
- Commands:
- model Export a model
- scene Export a scene
- dump Dump scene node structure. No output file is generated
- lod Combine several Urho3D models as LOD levels of the output model
- Syntax: lod <dist0> <mdl0> <dist1> <mdl1> ... <output file>
- Options:
- -b Save scene in binary format, default format is XML
- -i Use local ID's for scene nodes
- -mX Output a material list file X (model mode only)
- -na Do not export animations
- -nm Do not export materials
- -ns Do not create subdirectories for resources
- -nz Do not create a zone and a directional light (scene mode only)
- -pX Set path X for scene resources. Default is output file path
- -rX Use scene node X as root node
- -t Generate tangents to model(s)
- \endverbatim
- \section Tools_GLShaderProcessor GLShaderProcessor
- GLShaderProcessor creates final GLSL source code for vertex and pixel shaders, and enumerates the possible shader combinations. Unlike \ref Tools_ShaderCompiler "ShaderCompiler", compiling the shaders is left to runtime.
- Usage:
- \verbatim
- GLShaderProcessor <definitionfile> <outputpath> [define1] [define2]
- GLSL files will be loaded from definition file directory, and finalized GLSL +
- XML files are saved to the output path, preserving the subdirectory structure.
- \endverbatim
- \section Tools_OgreImporter OgreImporter
- Tool that loads OGRE .mesh.xml and .skeleton.xml files and saves them as Urho3D .mdl (model) and .ani (animation) files. For other 3D formats and whole scene importing, see AssetImporter instead; however, that tool does not handle the OGRE formats as completely as this one.
- Usage:
- \verbatim
- OgreImporter <input file> <output file> [options]
- Options:
- -a Export animations
- -m Export morphs
- -r Export only rotations from animations
- -s Split each submesh into own vertex buffer
- -t Generate tangents
- \endverbatim
- Note: exporting only bone rotations may help when using an animation in a different model, but if bone position changes have been used for effect, the animation may become less lively. Unpredictable results may occur when using an animation in a model it was not originally intended for, as Urho3D does not specifically attempt to retarget animations.
- \section Tools_PackageTool PackageTool
- PackageTool examines a directory recursively for files and subdirectories, and creates a PackageFile. The package file can be added to the ResourceCache and used as if the files were on a (read-only) filesystem.
- Usage:
- \verbatim
- PackageTool <directory to process> <package name> [basepath]
- \endverbatim
- When PackageTool runs, it will go inside the source directory, then look for subdirectories and any files. Paths inside the package will by default be relative to the source directory, but if an extra path prefix is desired, it can be specified by the optional basepath argument.
- For example, this would convert all the resource files inside the Urho3D Data directory into a package called Data.pak (execute the command from the Bin directory):
- \verbatim
- PackageTool Data Data.pak
- \endverbatim
- \section Tools_RampGenerator RampGenerator
- RampGenerator creates 1D and 2D ramp textures for use in light attenuation and spotlight spot shapes.
- Usage:
- \verbatim
- RampGenerator <output file> <width> <power> [dimensions]
- \endverbatim
- The output is saved in PNG format. The power parameter is fed into the pow() function to determine ramp shape; higher value gives more brightness and more abrupt fade at the edge.
- The texconv tool from the DirectX SDK needs to be available through the system PATH.
- \section Tools_ShaderCompiler ShaderCompiler
- This tool generates HLSL shader permutations using an XML definition file that describes the permutations, and their associated HLSL preprocessor defines.
- The output consists of shader bytecode for each permutation, as well as information of the constant parameters and texture units used. See \ref FileFormats_Shader "Binary shader format" for details.
- Usage:
- \verbatim
- ShaderCompiler <definitionfile> <outputpath> [SM3] [define1] [define2] ..
- HLSL files will be loaded from definition file directory, and binary files will
- be output to the output path, preserving the subdirectory structure.
- \endverbatim
- It is possible to give additional defines from the command line. These will then be present in each permutation. SM3 is a special define which enables compilation of VS3.0 and PS3.0 code, otherwise VS2.0 and PS2.0 code is generated.
- The D3DX library from the DirectX runtime or SDK needs to be installed.
- \page Unicode Unicode support
- The String class supports UTF-8 encoding. However, by default strings are treated as a sequence of bytes without regard to the encoding. There is a separate
- API for operating on Unicode characters, see for example \ref String::LengthUTF8 "LengthUTF8()", \ref String::AtUTF8 "AtUTF8()" and \ref String::SubstringUTF8 "SubstringUTF8()". Urho3D itself needs to be aware of the Unicode characters only in the \ref UI "user interface", when displaying text and manipulating it through user input.
- On Windows, wide char strings are used in all calls to the operating system, such as accessing the command line, files, and the window title. The WString class is used as a helper for conversion. On Linux & Mac OS X 8-bit strings are used directly and they are assumed to contain UTF-8.
- Note that the \ref FileSystem::ScanDir "ScanDir()" function may return filenames in unnormalized Unicode on Mac OS X. Unicode re-normalization is not yet implemented.
- \page FileFormats Custom file formats
- Urho3D tries to use existing file formats whenever possible, and define custom file formats only when absolutely necessary. Currently used custom file formats are:
- \section FileFormats_Model Binary model format (.mdl)
- \verbatim
- Model geometry and vertex morph data
- byte[4] Identifier "UMDL"
- uint Number of vertex buffers
- For each vertex buffer:
-     uint Vertex count
-     uint Vertex element mask (determines vertex size)
-     uint Morphable vertex range start index
-     uint Morphable vertex count
-     byte[] Vertex data (vertex count * vertex size)
- uint Number of index buffers
- For each index buffer:
-     uint Index count
-     uint Index size (2 for 16-bit indices, 4 for 32-bit indices)
-     byte[] Index data (index count * index size)
- uint Number of geometries
- For each geometry:
-     uint Number of bone mapping entries
-     uint[] Bone mapping data. Maps geometry bone indices to global bone indices for HW skinning.
-            May be empty, in which case an identity mapping will be used.
-     uint Number of LOD levels
-     For each LOD level:
-         float LOD distance
-         uint Primitive type (0 = triangle list, 1 = line list)
-         uint Vertex buffer index, starting from 0
-         uint Index buffer index, starting from 0
-         uint Draw range: index start
-         uint Draw range: index count
- uint Number of vertex morphs (may be 0)
- For each vertex morph:
-     cstring Name of morph
-     uint Number of affected vertex buffers
-     For each affected vertex buffer:
-         uint Vertex buffer index, starting from 0
-         uint Vertex element mask for morph data. Only positions, normals & tangents are supported.
-         uint Vertex count
-         For each vertex:
-             uint Vertex index
-             Vector3 Position (if included in the mask)
-             Vector3 Normal (if included in the mask)
-             Vector3 Tangent (if included in the mask)
- Skeleton data
- uint Number of bones (may be 0)
- For each bone:
-     cstring Bone name
-     uint Parent bone index starting from 0. Same as own bone index for the root bone
-     Vector3 Initial position
-     Quaternion Initial rotation
-     Vector3 Initial scale
-     float[12] 4x3 offset matrix for skinning
-     byte Bone collision info bitmask. 1 = bounding sphere, 2 = bounding box
-     If bounding sphere data included:
-         float Bone radius
-     If bounding box data included:
-         Vector3 Bone bounding box minimum
-         Vector3 Bone bounding box maximum
- Bounding box data
- Vector3 Model bounding box minimum
- Vector3 Model bounding box maximum
- Geometry center data
- For each geometry:
-     Vector3 Geometry center
- \endverbatim
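- For illustration, an engine-independent sketch of reading the start of a .mdl file with standard C++ I/O. This is not the engine's own loader; the file name is hypothetical and the data is assumed to be little-endian, as written on the platforms Urho3D targets:
- \code
- #include <cstdio>
- #include <fstream>
- #include <string>
- 
- // Read a little-endian 32-bit unsigned integer from the stream.
- static unsigned ReadUInt(std::ifstream& in)
- {
-     unsigned char b[4];
-     in.read(reinterpret_cast<char*>(b), 4);
-     return b[0] | (b[1] << 8) | (b[2] << 16) | (unsigned(b[3]) << 24);
- }
- 
- int main()
- {
-     std::ifstream in("Example.mdl", std::ios::binary); // Hypothetical file name
-     char id[4];
-     in.read(id, 4);
-     if (std::string(id, 4) != "UMDL")
-     {
-         std::printf("Not a UMDL file\n");
-         return 1;
-     }
- 
-     unsigned numVertexBuffers = ReadUInt(in);
-     std::printf("Vertex buffers: %u\n", numVertexBuffers);
-     if (numVertexBuffers > 0)
-     {
-         // Header of the first vertex buffer. The raw vertex data follows it, and its
-         // size must be derived from the element mask before further buffers can be read.
-         unsigned vertexCount = ReadUInt(in);
-         unsigned elementMask = ReadUInt(in);
-         unsigned morphStart = ReadUInt(in);
-         unsigned morphCount = ReadUInt(in);
-         std::printf("First buffer: %u vertices, element mask 0x%x, morph range %u + %u\n",
-             vertexCount, elementMask, morphStart, morphCount);
-     }
-     return 0;
- }
- \endcode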
- \section FileFormats_Animation Binary animation format (.ani)
- \verbatim
- byte[4] Identifier "UANI"
- cstring Animation name
- float Length in seconds
- uint Number of tracks
- For each track:
-     cstring Track name (in practice the same as the name of the bone to be driven)
-     byte Mask of included animation data. 1 = bone positions, 2 = bone rotations, 4 = bone scaling
-     uint Number of keyframes
-     For each keyframe:
-         float Time position in seconds
-         Vector3 Position (if included in data)
-         Quaternion Rotation (if included in data)
-         Vector3 Scale (if included in data)
- \endverbatim
- Note: animations are stored using absolute bone transformations. Therefore only lerp-blending between animations is supported; additive pose modification is not.
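- A similar engine-independent sketch for the animation format, again using standard C++ I/O, a hypothetical file name and a little-endian assumption; note how the per-keyframe size follows directly from the track's data mask:
- \code
- #include <fstream>
- #include <string>
- 
- // Read a little-endian 32-bit unsigned integer (same helper as in the model sketch above).
- static unsigned ReadUInt(std::ifstream& in)
- {
-     unsigned char b[4];
-     in.read(reinterpret_cast<char*>(b), 4);
-     return b[0] | (b[1] << 8) | (b[2] << 16) | (unsigned(b[3]) << 24);
- }
- 
- int main()
- {
-     std::ifstream in("Example.ani", std::ios::binary); // Hypothetical file name
-     char id[4];
-     in.read(id, 4); // Should contain "UANI"
- 
-     std::string animationName;
-     std::getline(in, animationName, '\0'); // cstring: null-terminated
- 
-     float length;
-     in.read(reinterpret_cast<char*>(&length), 4); // Assumes a little-endian host
- 
-     unsigned numTracks = ReadUInt(in);
-     for (unsigned i = 0; i < numTracks; ++i)
-     {
-         std::string trackName;
-         std::getline(in, trackName, '\0');
-         unsigned char mask = static_cast<unsigned char>(in.get());
-         unsigned numKeyFrames = ReadUInt(in);
- 
-         // Keyframe size: float time, plus Vector3 position (12 bytes), Quaternion
-         // rotation (16 bytes) and Vector3 scale (12 bytes) when included in the mask.
-         unsigned keyFrameSize = 4 + ((mask & 1) ? 12 : 0) + ((mask & 2) ? 16 : 0) + ((mask & 4) ? 12 : 0);
-         in.seekg(static_cast<std::streamoff>(numKeyFrames) * keyFrameSize, std::ios::cur);
-     }
-     return 0;
- }
- \endcode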
- \section FileFormats_Shader Direct3D9 binary shader format (.vs2, .ps2, .vs3, .ps3)
- \verbatim
- byte[4] Identifier "USHD"
- short Shader type (0 = vertex, 1 = pixel)
- short Shader model (2 or 3)
- uint Number of constant parameters
- For each constant parameter:
-     cstring Parameter name
- uint Number of texture units
- For each texture unit:
-     cstring Texture unit name
- uint Number of shader variations
- For each variation:
-     cstring Variation name
-     uint Number of constant parameters in use
-     For each constant parameter in use:
-         StringHash Parameter name hash
-         byte Register index
-         byte Number of registers
-     uint Number of texture units in use
-     For each texture unit in use:
-         StringHash Texture unit name hash
-         byte Sampler index
-     uint Bytecode size
-     byte[] Bytecode
- \endverbatim
- \page CodingConventions Coding conventions
- - Class and struct names are in upper-camelcase, i.e. they begin with an uppercase letter. They should be nouns, for example DebugRenderer, FreeTypeLibrary, Graphics. (See the illustrative header excerpt after this list.)
- - Functions are likewise in upper-camelcase. For example CreateComponent, SetLinearRestThreshold.
- - Variables are in lower-camelcase. Member variables have an underscore appended. For example numContacts, randomSeed_.
- - Constants and enumerations are in uppercase. For example Vector3::ZERO or PASS_SHADOW.
- - Pointers and references append the * or & symbol to the type without a space in between. For example Drawable* drawable, Serializer& dest.
- - Class definitions proceed in the following order:
-   - public constructors and the destructor
-   - public virtual functions
-   - public non-virtual member functions
-   - public static functions
-   - public member variables
-   - public static variables
-   - repeat all of the above in order for protected definitions, and finally private
- - Header files are commented using one-line comments beginning with /// to mark them for Doxygen.
- - Inline functions are defined inside the class definitions where possible, without using the inline keyword.
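- For illustration, a hypothetical header excerpt following these conventions (the class and its members are invented for this example and are not part of the engine):
- \code
- /// Hypothetical example class illustrating the coding conventions.
- class ContactTracker
- {
- public:
-     /// Construct.
-     ContactTracker();
-     /// Destruct.
-     virtual ~ContactTracker();
- 
-     /// Public virtual function in upper-camelcase.
-     virtual void Update(float timeStep);
-     /// Inline non-virtual function defined inside the class definition, without the inline keyword.
-     void SetLinearRestThreshold(float threshold) { linearRestThreshold_ = threshold; }
-     /// Pointer parameter with * appended to the type, no space in between.
-     void SetParent(ContactTracker* parent) { parent_ = parent; }
- 
-     /// Constant in uppercase.
-     static const unsigned MAX_CONTACTS = 16;
- 
- private:
-     /// Member variables in lower-camelcase with a trailing underscore.
-     float linearRestThreshold_;
-     unsigned numContacts_;
-     ContactTracker* parent_;
- };
- \endcode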
- */