  1. namespace Urho3D
  2. {
  3. /**
  4. \page Containers Container types
  5. Urho3D implements its own string type and template containers instead of using STL. The rationale for this consists of the following:
  6. - Increased performance in some cases, for example when using the PODVector class.
  7. - Guaranteed binary size of strings and containers, to allow eg. embedding inside the Variant object.
  8. - Reduced compile time.
  9. - Straightforward naming and implementation that aids in debugging and profiling.
  10. - Convenient member functions can be added, for example String::Split() or Vector::Compact().
  11. - Consistency with the rest of the classes, see \ref CodingConventions "Coding conventions".
  12. The classes in question are String, Vector, PODVector, List, HashSet and HashMap. PODVector is only to be used when the elements of the vector need no construction or destruction and can be moved with a block memory copy.
  13. The list, set and map classes use a fixed-size allocator internally. This can also be used by the application, either by using the procedural functions AllocatorInitialize(), AllocatorUninitialize(), AllocatorReserve() and AllocatorFree(), or through the template class Allocator.
  14. In script, the String class is exposed as it is. The template containers can not be directly exposed to script, but instead a template Array type exists, which behaves like a Vector, but does not expose iterators. In addition the VariantMap is available, which is a HashMap<ShortStringHash, Variant>.
  15. \page ObjectTypes %Object types and factories
  16. Classes that derive from Object contain type-identification, they can be created through object factories, and they can send and receive \ref Events "events". Examples of these are all Component, Resource and UIElement subclasses. To be able to be constructed by a factory, they need to have a constructor that takes a Context pointer as the only parameter.
  17. %Object factory registration and object creation through factories are directly accessible only in C++, not in script.
  18. The definition of an Object subclass must contain the OBJECT(className) macro. Type identification is available both as text (GetTypeName() or GetTypeNameStatic()) and as a 16-bit hash of the type name (GetType() or GetTypeStatic()).
  19. To register an object factory for a specific type, call the \ref Context::RegisterFactory "RegisterFactory()" template function on Context. You can get its pointer from any Object either via the \ref Object::context_ "context_" member variable, or by calling \ref Object::GetContext "GetContext()". An example:
  20. \code
  21. context_->RegisterFactory<MyClass>();
  22. \endcode
  23. To create an object using a factory, call Context's \ref Context::CreateObject "CreateObject()" function. This takes the 16-bit hash of the type name as a parameter. The created object (or null if there was no matching factory registered) will be returned inside a SharedPtr<Object>. For example:
  24. \code
  25. SharedPtr<Object> newComponent = context_->CreateObject(type);
  26. \endcode
  27. \page Subsystems Subsystems
  28. Any Object can be registered to the Context as a subsystem, by using the function \ref Context::RegisterSubsystem "RegisterSubsystem()". They can then be accessed by any other Object inside the same context by calling \ref Object::GetSubsystem "GetSubsystem()". Only one instance of each object type can exist as a subsystem.
  29. After Engine initialization, the following subsystems will always exist:
  30. - Time: manages frame updates, frame number and elapsed time counting, and controls the frequency of the operating system low-resolution timer.
  31. - WorkQueue: executes background tasks in worker threads.
  32. - FileSystem: provides directory operations.
  33. - Log: provides logging services.
  34. - ResourceCache: loads resources and keeps them cached for later access.
  35. - Network: provides UDP networking and scene replication.
  36. - Input: handles keyboard and mouse input. Will be inactive in headless mode.
  37. - UI: the graphical user interface. Will be inactive in headless mode.
  38. - Audio: provides sound output. Will be inactive if sound disabled.
  39. - Engine: creates the other subsystems and controls the main loop iteration and framerate limiting.
  40. The following subsystems are optional, so GetSubsystem() may return null if they have not been created:
  41. - Profiler: Provides hierarchical function execution time measurement using the operating system performance counter. Exists if profiling has been compiled in (configurable from the root CMakeLists.txt)
  42. - Graphics: Manages the application window, the rendering context and resources. Exists if not in headless mode.
  43. - Renderer: Renders scenes in 3D and manages rendering quality settings. Exists if not in headless mode.
  44. - Script: Provides the AngelScript execution environment. Needs to be created and registered manually.
  45. - Console: provides an interactive AngelScript console and log display. Created by calling \ref Engine::CreateConsole "CreateConsole()".
  46. - DebugHud: displays rendering mode information and statistics and profiling data. Created by calling \ref Engine::CreateDebugHud "CreateDebugHud()".
  47. In script, the subsystems are available through the following global properties:
  48. time, fileSystem, log, cache, network, input, ui, audio, engine, graphics, renderer, script, console, debugHud. Note that WorkQueue and Profiler are not available to script due to their low-level nature.
  49. \page Events Events
  50. The Urho3D event system allows for data transport and function invocation without the sender and receiver having to explicitly know of each other. Both the event sender and receiver must derive from Object. An event receiver must subscribe to each event type it wishes to receive: one can either subscribe to the event coming from any sender, or from a specific sender. The latter is useful for example when handling events from the user interface elements.
  51. Events themselves do not need to be registered. They are identified by 32-bit hashes of their names. Event parameters (the data payload) are optional and are contained inside a VariantMap, identified by 16-bit parameter name hashes. For the inbuilt Urho3D events, event type (E_UPDATE, E_KEYDOWN, E_MOUSEMOVE etc.) and parameter hashes (P_TIMESTEP, P_DX, P_DY etc.) are defined as constants inside include files such as CoreEvents.h or InputEvents.h.
  52. When subscribing to an event, a handler function must be specified. In C++ these must have the signature void HandleEvent(StringHash eventType, VariantMap& eventData). The HANDLER(className, function) macro helps in defining the required class-specific function pointers. For example:
  53. \code
  54. SubscribeToEvent(E_UPDATE, HANDLER(MyClass, MyEventHandler));
  55. \endcode
  56. In script events are identified by their string names instead of name hashes (though these are internally converted to hashes.) %Script event handlers can either have the same signature as in C++, or a simplified signature void HandleEvent() when event type and parameters are not required. The same event subscription would look like:
  57. \code
  58. SubscribeToEvent("Update", "MyEventHandler");
  59. \endcode
  60. In C++ events must always be handled by a member function. In script procedural event handling is also possible; in this case the ScriptFile where the event handler function is located becomes the event receiver. See \ref Scripting "Scripting" for more details.
  61. Events can also be unsubscribed from. See \ref Object::UnsubscribeFromEvent "UnsubscribeFromEvent()" for details.
  62. To send an event, fill the event parameters (if necessary) and call \ref Object::SendEvent "SendEvent()". For example, this (in C++) is how the Engine subsystem sends the Update event on each frame. Note how for the inbuilt Urho3D events, the parameter name hashes are always put inside a namespace (the event's name) to prevent name clashes:
  63. \code
  64. using namespace Update;
  65. VariantMap eventData;
  66. eventData[P_TIMESTEP] = timeStep_;
  67. SendEvent(E_UPDATE, eventData);
  68. \endcode
  69. In script event parameters, like event types, are referred to with strings, so the same code would look like:
  70. \code
  71. VariantMap eventData;
  72. eventData["TimeStep"] = timeStep;
  73. SendEvent("Update", eventData);
  74. \endcode
  75. \section Events_AnotherObject Sending events through another object
  76. Because the \ref Object::SendEvent "SendEvent()" function is public, an event can be "masqueraded" as originating from any object, even when not actually sent by that object's member function code. This can be used to simplify communication, particularly between components in the scene. For example, the \ref Physics "physics simulation" signals collision events by using the participating \ref Node "scene nodes" as senders. This means that any component can easily subscribe to its own node's collisions without having to know of the actual physics components involved. The same principle can also be used in any game-specific messaging, for example making a "damage received" event originate from the scene node, though it itself has no concept of damage or health.
  77. \page MainLoop %Engine initialization and main loop
  78. Before a Urho3D application can enter its main loop, the Engine subsystem object must be created and initialized by calling its \ref Engine::Initialize "Initialize()" function. Parameters sent in a VariantMap can be used to direct how the Engine initializes itself and the subsystems. One way to configure the parameters is to parse them from the command line like the Urho3DPlayer application does: this is accomplished by the helper function \ref Engine::ParseParameters "ParseParameters()".
  79. The full list of supported parameters, their datatypes and default values:
  80. - Headless (bool) Headless mode enable. Default false.
  81. - LogLevel (int) %Log verbosity level. Default LOG_INFO in release builds and LOG_DEBUG in debug builds.
  82. - LogQuiet (bool) %Log quiet mode, ie. to not write warning/info/debug log entries into standard output. Default false.
  83. - LogName (string) %Log filename. Default "Urho3D.log".
  84. - FrameLimiter (bool) Whether to cap maximum framerate to 200 (desktop) or 60 (Android/iOS.) Default true.
  85. - WorkerThreads (bool) Whether to create worker threads for the %WorkQueue subsystem according to available CPU cores. Default true.
  86. - ResourcePaths (string) A semicolon-separated list of resource paths to use. If corresponding packages (ie. Data.pak for Data directory) exist they will be used instead. Default "CoreData;Data".
  87. - ResourcePackages (string) A semicolon-separated list of resource packages to use. Default empty.
  88. - ForceSM2 (bool) Whether to force %Shader %Model 2, effective in Direct3D9 mode only. Default false.
  89. - ExternalWindow (void ptr) External window handle to use instead of creating an application window. Default null.
  90. - WindowIcon (string) %Window icon image resource name. Default empty (use application default icon.)
  91. - WindowTitle (string) %Window title. Default "Urho3D".
  92. - WindowWidth (int) %Window horizontal dimension. Default 0 (use desktop resolution, or 1024 in windowed mode.)
  93. - WindowHeight (int) %Window vertical dimension. Default 0 (use desktop resolution, or 768 in windowed mode.)
  94. - WindowResizable (bool) Whether window is resizable. Default false.
  95. - FullScreen (bool) Whether to create a full-screen window. Default true.
  96. - Borderless (bool) Whether to create the window as borderless. Default false.
  97. - TripleBuffer (bool) Whether to use triple-buffering. Default false.
  98. - VSync (bool) Whether to wait for vertical sync when presenting rendering window contents. Default false.
  99. - Multisample (int) Hardware multisampling level. Default 1 (no multisampling.)
  100. - %RenderPath (string) Default renderpath resource name. Default empty, which causes forward rendering (Bin/CoreData/RenderPaths/Forward.xml) to be used.
  101. - Shadows (bool) Shadow rendering enable. Default true.
  102. - LowQualityShadows (bool) Low-quality (1 sample) shadow mode. Default false.
  103. - %Sound (bool) %Sound enable. Default true.
  104. - SoundBuffer (int) %Sound buffer length in milliseconds. Default 100.
  105. - SoundMixRate (int) %Sound output frequency in Hz. Default 44100.
  106. - SoundStereo (bool) Stereo sound output mode. Default true.
  107. - SoundInterpolation (bool) Interpolated sound output mode to improve quality. Default true.
  108. \section MainLoop_Frame Main loop iteration
  109. The main loop iteration (also called a frame) is driven by the Engine. In contrast it is the program's (for example Urho3DPlayer) responsibility to continuously loop this iteration by calling \ref Engine::RunFrame "RunFrame()". This function calls in turn the Time subsystem's \ref Time::BeginFrame "BeginFrame()" and \ref Time::EndFrame "EndFrame()" functions, and sends various update events in between. The event order is:
  110. - E_BEGINFRAME: signals the beginning of the new frame. Input and Network react to this to check for operating system window messages and arrived network packets.
  111. - E_UPDATE: application-wide logic update event. By default each update-enabled Scene reacts to this and triggers the scene update (more on this below.)
  112. - E_POSTUPDATE: application-wide logic post-update event. The UI subsystem updates its logic here.
  113. - E_RENDERUPDATE: Renderer updates its viewports here to prepare for rendering, and the UI generates render commands necessary to render the user interface.
  114. - E_POSTRENDERUPDATE: by default nothing hooks to this. This can be used to implement logic that requires the rendering views to be up-to-date, for example to do accurate raycasts. Scenes may not be modified at this point; in particular, scene objects may not be deleted, or crashes may occur.
  115. - E_ENDFRAME: signals the end of the frame. Before this, rendering the frame and measuring the next frame's timestep will have occurred.
  116. The update of each Scene causes further events to be sent:
  117. - E_SCENEUPDATE: variable timestep scene update. This is a good place to implement any scene logic that does not need to happen at a fixed step.
  118. - E_SCENESUBSYSTEMUPDATE: update scene-wide subsystems. Currently only the PhysicsWorld component listens to this, which causes it to step the physics simulation and send the following two events for each simulation step:
  119. - E_PHYSICSPRESTEP: called before the simulation iteration. Happens at a fixed rate (the physics FPS.) If fixed timestep logic updates are needed, this is a good event to listen to.
  120. - E_PHYSICSPOSTSTEP: called after the simulation iteration. Happens at the same rate as E_PHYSICSPRESTEP.
  121. - E_SMOOTHINGUPDATE: update SmoothedTransform components in network client scenes.
  122. - E_SCENEPOSTUPDATE: variable timestep scene post-update. ParticleEmitter and AnimationController update themselves as a response to this event.
  123. Variable timestep logic updates are preferable to fixed timestep, because they are only executed once per frame. In contrast, if the rendering framerate is low, several physics simulation steps will be performed on each frame to keep up the apparent passage of time, and if this also causes a lot of logic code to be executed for each step, the program may bog down further if the CPU can not handle the load. Note that the Engine's \ref Engine::SetMinFps "minimum FPS", by default 10, sets a hard cap for the timestep to prevent spiraling down to a complete halt; if exceeded, animation and physics will instead appear to slow down.
  124. \section MainLoop_ApplicationState Main loop and the application activation state
  125. The application window's state (has input focus, minimized or not) can be queried from the Input subsystem. It can also affect the main loop in the following ways:
  126. - Rendering is always skipped when the window is minimized.
  127. - To avoid spinning the CPU and GPU unnecessarily, it is possible to define a smaller maximum FPS when no input focus. See \ref Engine::SetMaxInactiveFps "SetMaxInactiveFps()"
  128. - It is also possible to automatically pause update events and audio when the window is minimized. Use \ref Engine::SetPauseMinimized "SetPauseMinimized()" to control this behaviour. By default it is not enabled on desktop, and enabled on mobile devices (Android and iOS.) For singleplayer games this is recommended to avoid unwanted progression while away from the program. However in a multiplayer game this should not be used, as the missing scene updates would likely desync the client with the server.
  129. - On mobile devices the window becoming minimized can mean that it will never become maximized again, in case the OS decides it needs to free memory and kills your program. Therefore you should listen for the E_INPUTFOCUS event from the Input subsystem and immediately save your program state as applicable if the program loses input focus or is minimized.
  130. - On mobile devices it is also unsafe to access or create any graphics resources while the window is minimized (as the graphics context may be destroyed during this time); doing so can crash the program. It is recommended to leave the pause-minimized feature on to ensure you do not have to check for this in your update code.
  131. Note that on iOS calling \ref Engine::Exit "Exit()" is a no-op as there is no officially sanctioned way to manually exit your program. On Android it will cause the activity to manually exit.
  132. \section MainLoop_ApplicationFramework Application framework
  133. The Application class provides a minimal framework for a Urho3D C++ application with a main loop. It has virtual functions Setup(), Start() and Stop() which can be defined by the application subclass. The header file also provides a macro for defining a program entry point, which
  134. will instantiate the Context object and then the user-specified application class. A minimal example, which would just display a blank rendering window and exit by pressing ESC:
  135. \code
  136. #include "Application.h"
  137. #include "Engine.h"
  138. #include "InputEvents.h"
  139. class MyApp : public Application
  140. {
  141. public:
  142. MyApp(Context* context) :
  143. Application(context)
  144. {
  145. }
  146. virtual void Setup()
  147. {
  148. // Called before engine initialization. engineParameters_ member variable can be modified here
  149. }
  150. virtual void Start()
  151. {
  152. // Called after engine initialization. Setup application & subscribe to events here
  153. SubscribeToEvent(E_KEYDOWN, HANDLER(MyApp, HandleKeyDown));
  154. }
  155. virtual void Stop()
  156. {
  157. // Perform optional cleanup after main loop has terminated
  158. }
  159. void HandleKeyDown(StringHash eventType, VariantMap& eventData)
  160. {
  161. using namespace KeyDown;
  162. // Check for pressing ESC. Note the engine_ member variable for convenience access to the Engine object
  163. int key = eventData[P_KEY].GetInt();
  164. if (key == KEY_ESC)
  165. engine_->Exit();
  166. }
  167. };
  168. DEFINE_APPLICATION_MAIN(MyApp)
  169. \endcode
  170. \page SceneModel %Scene model
  171. Urho3D's scene model can be described as a component-based scene graph. The Scene consists of a hierarchy of scene nodes, starting from the root node, which also represents the whole scene. Each Node has a 3D transform (position, rotation and scale), a name and an ID, and a freeform VariantMap for \ref Node::GetVars "user variables", but no other functionality.
  172. \section SceneModel_Components Components
  173. Rendering 3D objects, sound playback, physics and scripted logic updates are all enabled by creating different \ref Component "Components" into the nodes by calling \ref Node::CreateComponent "CreateComponent()". As with events, in C++ components are identified by type name hashes, and template forms of the component creation and retrieval functions exist for convenience. For example:
  174. \code
  175. Light* light = node->CreateComponent<Light>();
  176. \endcode
  177. In script, strings are used to identify component types instead, so the same code would look like:
  178. \code
  179. Light@ light = node.CreateComponent("Light");
  180. \endcode
  181. Because components are created using \ref ObjectTypes "object factories", a factory must be registered for each component type.
  182. Components created into the Scene itself have a special role: to implement scene-wide functionality. They should be created before all other components, and include the following:
  183. - Octree: implements spatial partitioning and accelerated visibility queries. Without this 3D objects can not be rendered.
  184. - PhysicsWorld: implements physics simulation. Physics components such as RigidBody or CollisionShape can not function properly without this.
  185. - DebugRenderer: implements debug geometry rendering.
  186. "Ordinary" components like Light, Camera or StaticModel should not be created directly into the Scene, but rather into child nodes.
  187. \section SceneModel_Identification Identification and scene hierarchy
  188. Unlike nodes, components do not have names; components inside the same node are only identified by their type, and index in the node's component list, which is filled in creation order. See the various overloads of \ref Node::GetComponent "GetComponent()" or \ref Node::GetComponents "GetComponents()" for details.
  189. When created, both nodes and components get scene-global integer IDs. They can be queried from the Scene by using the functions \ref Scene::GetNodeByID "GetNodeByID()" and \ref Scene::GetComponentByID "GetComponentByID()". This is much faster than for example doing recursive name-based scene node queries.
  190. There is no inbuilt concept of an entity or a game object; rather it is up to the programmer to decide the node hierarchy, and in which nodes to place any scripted logic. Typically, free-moving objects in the 3D world would be created as children of the root node. Nodes can be created either with or without a name, see \ref Node::CreateChild "CreateChild()". Uniqueness of node names is not enforced.
  191. Whenever there is some hierarchical composition, it is recommended (and in fact necessary, because components do not have their own 3D transforms) to create a child node. For example if a character was holding an object in his hand, the object should have its own node, which would be parented to the character's hand bone (also a Node.) The exception is the physics CollisionShape, which can be offset and rotated individually in relation to the node. See \ref Physics "Physics" for more details. Note that Scene's own transform is purposefully ignored as an optimization when calculating world derived transforms of child nodes, so changing it has no effect and it should be left as it is (position at origin, no rotation, no scaling.)
  192. %Scene nodes can be freely reparented. In contrast components are always created to the node they belong to, and can not be moved between nodes. Both child nodes and components are stored using SharedPtr containers; this means that detaching a child node from its parent or removing a component will also destroy it, if no other references to it exist. Both Node & Component provide the \ref Node::Remove "Remove()" function to accomplish this without having to go through the parent. Note that no operations on the node or component in question are safe after calling that function.
  193. It is also legal to create a Node that does not belong to a scene. This is useful for example with a camera moving in a scene that may be loaded or saved, because then the camera will not be saved along with the actual scene, and will not be destroyed when the scene is loaded. However, note that creating geometry, physics or script components to an unattached node, and then moving it into a scene later will cause those components to not work correctly.
  194. \section SceneModel_Update Scene updates
  195. A Scene whose updates are enabled (default) will be automatically updated on each main loop iteration. See \ref Scene::SetUpdateEnabled "SetUpdateEnabled()".
  196. Nodes and components can be excluded from the scene update by disabling them, see \ref Node::SetEnabled "SetEnabled()". Disabling for example a drawable component also makes it invisible, a sound source component becomes inaudible etc. If a node is disabled, all of its components are treated as disabled regardless of their own enable/disable state.
  197. \section SceneModel_LoadSave Loading and saving scenes
  198. Scenes can be loaded and saved in either binary or XML format; see the functions \ref Scene::Load "Load()", \ref Scene::LoadXML "LoadXML()", \ref Scene::Save "Save()" and \ref Scene::SaveXML "SaveXML()". See \ref Serialization
  199. "Serialization" for the technical details on how this works. When a scene is loaded, all existing content in it (child nodes and components) is removed first.
  200. Nodes and components that are marked temporary will not be saved. See \ref Serializable::SetTemporary "SetTemporary()".
  201. To be able to track the progress of loading a (large) scene without having the program stall for the duration of the loading, a scene can also be loaded asynchronously. This means that on each frame the scene loads child nodes until a certain amount of milliseconds has been exceeded. See \ref Scene::LoadAsync "LoadAsync()" and \ref Scene::LoadAsyncXML "LoadAsyncXML()". Use the functions \ref Scene::IsAsyncLoading "IsAsyncLoading()" and \ref Scene::GetAsyncProgress "GetAsyncProgress()" to track the loading progress; the latter returns a float value between 0 and 1, where 1 is fully loaded. The scene will not update or render before it is fully loaded.
  202. \section SceneModel_Instantiation Object prefabs
  203. Just loading or saving whole scenes is not flexible enough for eg. games where new objects need to be dynamically created. On the other hand, creating complex objects and setting their properties in code will also be tedious. For this reason, it is also possible to save a scene node (and its child nodes, components and attributes) to either binary or XML to be able to instantiate it later into a scene. Such a saved object is often referred to as a prefab. There are three ways to do this:
  204. - In code by calling \ref Node::Save "Save()" or \ref Node::SaveXML "SaveXML()" on the Node in question.
  205. - In the editor, by selecting the node in the hierarchy window and choosing "Save node as" from the "File" menu.
  206. - Using the "node" command in AssetImporter, which will save the scene node hierarchy and any models contained in the input asset (eg. a Collada file)
  207. To instantiate the saved node into a scene, call \ref Scene::Instantiate "Instantiate()" or \ref Scene::InstantiateXML "InstantiateXML()" depending on the format. The node will be created as a child of the Scene but can be freely reparented after that. Position and rotation for placing the node need to be specified. The NinjaSnowWar example uses XML format for its object prefabs; these exist in the Bin/Data/Objects directory.
  208. \section SceneModel_FurtherInformation Further information
  209. For more information on the component-based scene model, see for example http://cowboyprogramming.com/2007/01/05/evolve-your-heirachy/. Note that the Urho3D scene model is not a pure Entity-Component-System design, which would have the components just as bare data containers, and only systems acting on them. Instead the Urho3D components contain logic of their own, and actively communicate with the systems (such as rendering, physics or script engine) they depend on.
  210. \page Resources Resources
  211. Resources include most things in Urho3D that are loaded from mass storage during initialization or runtime:
  212. - Animation
  213. - Image
  214. - Model
  215. - Material
  216. - ScriptFile
  217. - Shader
  218. - Sound
  219. - Technique
  220. - Texture2D
  221. - TextureCube
  222. - XMLFile
  223. They are managed and loaded by the ResourceCache subsystem. Like with all other \ref ObjectTypes "typed objects", resource types are identified by 16-bit type name hashes (C++) or type names (script). An object factory must be registered for each resource type.
  224. The resources themselves are identified by their file paths, relative to the registered resource directories or \ref PackageFile "package files". By default, the engine registers the resource directories Data and CoreData, or the packages Data.pak and CoreData.pak if they exist.
  225. If loading a resource fails, an error will be logged and a null pointer is returned.
  226. Typical C++ example of requesting a resource from the cache, in this case, a texture for a UI element. Note the use of a convenience template argument to specify the resource type, instead of using the type hash.
  227. \code
  228. healthBar->SetTexture(GetSubsystem<ResourceCache>()->GetResource<Texture2D>("Textures/HealthBarBorder.png"));
  229. \endcode
  230. The same in script would look like this (note the use of a property instead of a setter function):
  231. \code
  232. healthBar.texture = cache.GetResource("Texture2D", "Textures/HealthBarBorder.png");
  233. \endcode
  234. Resources can also be created manually and stored to the resource cache as if they had been loaded from disk.
  235. Memory budgets can be set per resource type: if resources consume more memory than allowed, the oldest resources will be removed from the cache if not in use anymore. By default the memory budgets are set to unlimited.
  236. \page Scripting Scripting
  237. To enable AngelScript scripting support, the Script subsystem needs to be created and registered after initializing the Engine. This is accomplished by the following code, seen eg. in Tools/Urho3DPlayer/Urho3DPlayer.cpp:
  238. \code
  239. context_->RegisterSubsystem(new Script(context_));
  240. \endcode
  241. There are three ways the AngelScript language can be interacted with in Urho3D:
  242. \section Scripting_Immediate Immediate execution
  243. Immediate execution takes one line of AngelScript, compiles it, and executes. This is not recommended for anything that needs high performance, but can be used for example to implement a developer console. Call the Script subsystem's \ref Script::Execute "Execute()" function to use. For example:
  244. \code
  245. GetSubsystem<Script>()->Execute("Print(\"Hello World!\");");
  246. \endcode
  247. It may be useful to be able to access a specific scene or a script file while executing immediate script code. These can be set on the Script subsystem by calling \ref Script::SetDefaultScene "SetDefaultScene()" and \ref Script::SetDefaultScriptFile "SetDefaultScriptFile()".
  248. \section Scripting_Procedural Calling a function from a script file
  249. This requires a successfully loaded ScriptFile resource, whose \ref ScriptFile::Execute "Execute()" function will be used. To identify the function to be called, its full declaration is needed. Parameters are passed in a VariantVector. For example:
  250. \code
  251. ScriptFile* file = GetSubsystem<ResourceCache>()->GetResource<ScriptFile>("Scripts/MyScript.as");
  252. VariantVector parameters;
  253. parameters.Push(Variant(100)); // Add an int parameter
  254. file->Execute("void MyFunction(int)", parameters); // Execute
  255. \endcode
  256. \ref ScriptFile::Execute "Execute()" also has an overload which takes a function pointer instead of querying by declaration. Using a pointer is naturally faster than a query, but also more risky: in case the ScriptFile resource is unloaded or reloaded, any function pointers will be invalidated.
  257. \section Scripting_Object Instantiating a script object
  258. The component ScriptInstance can be used to instantiate a specific class from within a script file. After instantiation, the script object can respond to scene updates, \ref Events "events" and \ref Serialization "serialization" much like a component written in C++ would do, if it has the appropriate methods implemented. For example:
  259. \code
  260. ScriptInstance* instance = node->CreateComponent<ScriptInstance>();
  261. instance->CreateObject(GetSubsystem<ResourceCache>()->GetResource<ScriptFile>("Scripts/MyClass.as"), "MyClass");
  262. \endcode
  263. The class must implement the empty interface ScriptObject to make its base class statically known. This enables accessing any script object in the scene using ScriptInstance's \ref ScriptInstance::GetScriptObject "GetScriptObject()" function.
  264. The following methods that implement the component behaviour will be checked for. None of them are required.
  265. - void Start()
  266. - void Stop()
  267. - void DelayedStart()
  268. - void Update(float)
  269. - void PostUpdate(float)
  270. - void FixedUpdate(float)
  271. - void FixedPostUpdate(float)
  272. - void Save(Serializer&)
  273. - void Load(Deserializer&)
  274. - void WriteNetworkUpdate(Serializer&)
  275. - void ReadNetworkUpdate(Deserializer&)
  276. - void ApplyAttributes()
  277. - void TransformChanged()
  278. The update methods above correspond to the variable timestep scene update and post-update, and the fixed timestep physics world update and post-update. The application-wide update events are not handled by default.
  279. The Start() and Stop() methods do not have direct counterparts in C++ components. Start() is called just after the script object has been created. Stop() is called just before the script object is destroyed. This happens when the ScriptInstance is destroyed, or if the script class is changed.
  280. When a scene node hierarchy with script objects is instantiated (such as when loading a scene) any child nodes may not have been created yet when Start() is executed, and can thus not be relied upon for initialization. The DelayedStart() method can be used in this case instead: if defined, it is called immediately before any of the Update() calls.
  281. TransformChanged() is called whenever the scene node transform changes, similar to C++ components' OnMarkedDirty() function. Due to an optimization mechanism for repeated scene graph updates, you need to "undirty" the scene node by for example reading its world position to get further transform changed notifications.
  282. Subscribing to \ref Events "events" in script behaves differently depending on whether \ref Object::SubscribeToEvent "SubscribeToEvent()" is called from a script object's method, or from a procedural script function. If called from an instantiated script object, the ScriptInstance becomes the event receiver on the C++ side, and calls the specified handler method when the event arrives. If called from a function, the ScriptFile will be the event receiver and the handler must be a free function in the same script file. The third case is if the event is subscribed to from a script object that does not belong to a ScriptInstance. In that case the ScriptFile will create a proxy C++ object on demand to be able to forward the event to the script object.
  283. The script object's enabled state can be controlled through the \ref ScriptInstance::SetEnabled "SetEnabled()" function. When disabled, the scripted update methods or event handlers will not be called. This can be used to reduce CPU load in a large or densely populated scene.
  284. There are shortcut methods on the script side for creating and accessing a node's script object: node.CreateScriptObject() and node.GetScriptObject(). Alternatively, if the node has only one ScriptInstance, and a specific class is not needed, the node's scriptObject property can also be used. CreateScriptObject() takes the script file name (or alternatively, a ScriptFile object handle) and class name as parameters and creates a ScriptInstance component automatically, then creates the script object. For example:
  285. \code
  286. ScriptObject@ object = node.CreateScriptObject("Scripts/MyClass.as", "MyClass");
  287. \endcode
  288. Note that these are not actual Node member functions on the C++ side, as the %Scene classes are not allowed to depend on scripting.
  289. \section Scripting_ObjectSerialization Script object serialization
  290. After instantiation, the script object's public member variables that can be converted into Variant, and that don't begin with an underscore are automatically available as attributes of the ScriptInstance, and will be serialized.
  291. Node and Component handles are also converted into nodeID and componentID attributes automatically. Note: this automatic attribute mechanism means that a ScriptInstance's attribute list changes dynamically depending on the class that has been instantiated.
  292. If the script object contains more complex data structures, you can also serialize and deserialize into a binary buffer manually by implementing the Load() and Save() methods.
  293. %Network replication of the script object variables must be handled manually by implementing WriteNetworkUpdate() and ReadNetworkUpdate() methods, that also write and read a binary buffer. These methods should write/read all replicated variables of the object. Additionally, the ScriptInstance must be marked for network replication by calling MarkNetworkUpdate() whenever the replicated data changes. Because this replication mechanism can not sync per variable, but always sends the whole binary buffer if even one bit of the data changes, also consider using the automatically replicated node user variables.
  294. \section Script_DelayedCalls Delayed method calls
  295. Delayed method calls can be used in script objects to implement time-delayed actions. Use the DelayedExecute() function in script object code to add a method to be executed later. The parameters are the delay in seconds, repeat flag, the full declaration of the function, and optionally parameters, which must be placed in a Variant array. For example:
  296. \code
  297. class Test : ScriptObject
  298. {
  299. void Start()
  300. {
  301. Array<Variant> parameters;
  302. parameters.Push(Variant(100));
  303. DelayedExecute(1.0, false, "void Trigger(int)", parameters);
  304. }
  305. void Trigger(int parameter)
  306. {
  307. Print("Delayed function triggered with parameter " + parameter);
  308. }
  309. }
  310. \endcode
  311. Delayed method calls can be removed by full declaration using the ClearDelayedExecute() function. If an empty declaration (default) is given as parameter, all delayed calls are removed.
  312. When a scene is saved/loaded, any pending delayed calls are also saved and restored properly.
  313. \section Script_ScriptAPI The script API
  314. Much of the Urho3D classes are exposed to scripts, however things that require low-level access or high performance (like direct vertex buffer access) are not. Also for scripting convenience some things have been changed from the C++ API:
  315. - The template array and string classes are exposed as Array<type> and String.
  316. - Public member variables are exposed without the underscore appended. For example x, y, z in Vector3.
  317. - Whenever only a single parameter is needed, setter and getter functions are replaced with properties. Such properties start with a lowercase letter. If an index parameter is needed, the property will be indexed. Indexed properties are in plural.
  318. - The element count property of arrays and other dynamic structures such as VariantMap and ResourceRefList is called "length", though the corresponding C++ function is usually Size().
  319. - Subsystems exist as global properties: time, fileSystem, log, cache, network, input, ui, audio, engine, graphics, renderer, script, console, debugHud.
  320. - Additional global properties exist for accessing the script object's node, the scene and the scene-wide components: node, scene, octree, physicsWorld, debugRenderer. When an object method is not executing, these are null. An exception: when the default scene for immediate execution has been set by calling \ref Script::SetDefaultScene "SetDefaultScene()", it is always available as "scene".
  321. - The currently executing script object's ScriptInstance component is available through the global property self.
  322. - The currently executing script file is available through the global property scriptFile.
  323. - The first script object created to a node is available as its scriptObject property.
  324. - Printing raw output to the log is simply called Print(). The rest of the logging functions are accessed by calling log.Debug(), log.Info(), log.Warning() and log.Error().
  325. - Functions that would take a StringHash or ShortStringHash parameter usually take a string instead. For example sending events, requesting resources and accessing components.
  326. - Most of StringUtils have been exposed as methods of the string class. For example String.ToBool().
  327. - Template functions for getting components or resources by type are not supported. Instead automatic type casts are performed as necessary.
  328. Check the automatically built \ref ScriptAPI "Scripting API" documentation for the exact function signatures. Note that the API documentation can be regenerated to the Urho3D log file by calling \ref Script::DumpAPI "DumpAPI()" function on the Script subsystem or by using \ref Tools_ScriptCompiler "ScriptCompiler tool".
  329. \section Script_Bytecode Precompiling scripts to bytecode
  330. Instead of compiling scripts from source on-the-fly during startup, they can also be precompiled to bytecode, then loaded. Use the \ref Tools_ScriptCompiler "ScriptCompiler" utility for this. In this case the resource request has to be pointed to the compiled file, which by default has the .asc extension:
  331. \code
  332. ScriptFile* file = GetSubsystem<ResourceCache>()->GetResource<ScriptFile>("Scripts/MyScript.asc");
  333. \endcode
  334. \section Scripting_Limitations Limitations
  335. There are some complexities of the scripting system one has to watch out for:
  336. - During the execution of the script object's constructor, the object is not yet associated with the ScriptInstance, and therefore subscribing to events, adding delayed method calls, or trying to access the node or scene will fail. The use of the constructor is best reserved for initializing member variables only.
  337. - When the resource request for a particular ScriptFile is initially made, the script file and the files it includes are compiled into an AngelScript script module. Each script module has its own class hierarchy that is not usable from other script modules, unless the classes are declared shared. See AngelScript documentation for more details.
  338. - If a ScriptFile resource is reloaded, all the script objects created from it will be destroyed, then recreated. They will lose any stored state as their constructors and Start() methods will be run again. This is rarely useful when running an actual game, but may be helpful during development.
  339. \section Scripting_Modifications AngelScript modifications
  340. The following changes have been made to AngelScript in Urho3D:
  341. - For performance reasons and to guarantee immediate removal of expired objects, AngelScript garbage collection has been disabled for script classes and the Array type. This has the downside that circular references will not be detected. Therefore, whenever you have object handles in your script, think of them as if they were C++ shared pointers and avoid creating circular references with them. For safety, consider using the value type WeakHandle, which is a WeakPtr<RefCounted> exposed to script and can be used to point to any engine object (but not to script objects.) An example of using WeakHandle:
  342. \code
  343. WeakHandle rigidBodyWeak = node.CreateComponent("RigidBody");
  344. RigidBody@ rigidBodyShared = rigidBodyWeak.Get(); // Is null if expired
  345. \endcode
  346. - %Object handle assignment can be done without the @ symbol if the object in question does not support value assignment. All exposed Urho3D C++ classes that derive from RefCounted never support value assignment. For example, when assigning the Model and Material of a StaticModel component:
  347. \code
  348. object.model = cache.GetResource("Model", "Models/Mushroom.mdl");
  349. object.material = cache.GetResource("Material", "Materials/Mushroom.xml");
  350. \endcode
  351. In unmodified AngelScript, this would have to be written as:
  352. \code
  353. @object.model = cache.GetResource("Model", "Models/Mushroom.mdl");
  354. @object.material = cache.GetResource("Material", "Materials/Mushroom.xml");
  355. \endcode
  356. \page LuaScripting Lua scripting
  357. Lua scripting in Urho3D has its dedicated LuaScript subsystem that must be instantiated before the scripting capabilities can be used. Lua support is not compiled in by default but must be enabled by the CMake
  358. build option -DENABLE_LUA=1. For more details see \ref Build_Options "Build options". Instantiating the subsystem is done like this:
  359. \code
  360. context_->RegisterSubsystem(new LuaScript(context_));
  361. \endcode
  362. Like AngelScript, Lua scripting supports immediate compiling and execution of single script lines, loading script files and executing procedural functions from them, and instantiating script objects
  363. to scene nodes using the LuaScriptInstance component.
  364. \section LuaScripting_Immediate Immediate execution
  365. Use \ref LuaScript::ExecuteString "ExecuteString()" to compile and run a line of Lua script. This should not be used for performance-critical operations.
  366. \section LuaScripting_ScriptFiles Script files and functions
  367. In contrast to AngelScript modules, which exist as separate entities and do not share functions or variables unless explicitly marked shared, in the Lua subsystem everything is loaded and executed in one Lua state, so scripts can naturally access everything loaded so far. To load and execute a Lua script file, call \ref LuaScript::ExecuteFile "ExecuteFile()".
  368. After that, the functions in the script file are available for calling. Use \ref LuaScript::GetFunction "GetFunction()" to get a Lua function by name. This returns a LuaFunction object, on which you should call \ref LuaFunction::BeginCall "BeginCall()" first, followed by pushing the function parameters if any, and finally execute the function with \ref LuaFunction::EndCall "EndCall()".
  369. \section LuaScripting_ScriptObjects Script objects
  370. By using the LuaScriptInstance component, Lua script objects can be added to scene nodes. After the component has been created, there are two ways to specify the object to instantiate: either specifying both the script file name and the object class name, in which case the script file is loaded and executed first, or specifying only the class name, in which case the Lua code containing the class definition must already have been executed. An example of creating a script object in C++ from the LuaIntegration sample, where a class called Rotator is instantiated from the script file Rotator.lua:
  371. \code
  372. LuaScriptInstance* instance = node->CreateComponent<LuaScriptInstance>();
  373. instance->CreateObject("LuaScripts/Rotator.lua", "Rotator");
  374. \endcode
  375. After instantiation, use \ref LuaScriptInstance::GetScriptObjectFunction "GetScriptObjectFunction()" to get the object's functions by name; calling happens like above.
  376. Like their AngelScript counterparts, script object classes can define functions which are automatically called by LuaScriptInstance for operations like initialization, scene update, or load/save. These functions are listed below. Refer to the \ref Scripting "AngelScript scripting" page for details.
  377. - Start()
  378. - Stop()
  379. - Update(timeStep)
  380. - PostUpdate(timeStep)
  381. - FixedUpdate(timeStep)
  382. - FixedPostUpdate(timeStep)
  383. - Save(serializer)
  384. - Load(deserializer)
  385. - WriteNetworkUpdate(serializer)
  386. - ReadNetworkUpdate(deserializer)
  387. - ApplyAttributes()
  388. - TransformChanged()
  389. \section LuaScripting_Events Event handling
  390. Like in AngelScript, both procedural and object event handling is supported. In procedural event handling the LuaScript subsystem acts as the event receiver on the C++ side, and forwards the event to a Lua function. Use SubscribeToEvent and give the event name and the function to use as the handler. Optionally a specific sender object can be given as the first argument instead. For example, subscribing to the application-wide Update event, and getting its timestep parameter in the event handler function.
  391. \code
  392. SubscribeToEvent("Update", "HandleUpdate")
  393. ...
  394. function HandleUpdate(eventType, eventData)
  395. local timeStep = eventData:GetFloat("TimeStep")
  396. ...
  397. end
  398. \endcode
  399. When subscribing a script object to receive an event, use the form self:SubscribeToEvent instead. The function to use as the handler is given as "ClassName:FunctionName". For example subscribing to the NodeCollision physics event, and getting the participating other scene node and the contact point VectorBuffer in the handler function:
  400. \code
  401. CollisionDetector = ScriptObject()
  402. function CollisionDetector:Start()
  403. self:SubscribeToEvent(self.node, "NodeCollision", "CollisionDetector:HandleNodeCollision")
  404. end
  405. function CollisionDetector:HandleNodeCollision(eventType, eventData)
  406. local otherNode = eventData:GetPtr("Node", "OtherNode")
  407. local contacts = eventData:GetBuffer("Contacts")
  408. ...
  409. end
  410. \endcode
  411. \section LuaScripting_API The script API
  412. The binding of Urho3D C++ classes is accomplished with the tolua++ library, which for the most part binds the exact same function parameters as C++. Compared to the AngelScript API, you will always have the classes' Get / Set functions available, but in addition convenience properties also exist.
  413. When constructing Object subclasses, you need to supply the Context pointer as the first parameter. Use the global function GetContext() to get it. Similarly, use eg. GetFileSystem() or GetInput() to access the subsystems.
  414. As seen above from the event handling examples, VariantMap handling has some differences to both C++ and AngelScript. To get a value, supply its key name as a string. To get a pointer to an object, supply first the object type, then the key name.
  415. For the rest of the functions and classes, see the generated \ref LuaScriptAPI "Lua script API reference". Also, look at the Lua counterparts of the sample applications in the Bin/Data/LuaScripts directory and compare them to the C++ and AngelScript versions to familiarize yourself with how things are done on the Lua side.
  416. \section LuaScripting_Allocation Object allocation & Lua garbage collection
  417. There are two ways to allocate a C++ object in Lua scripting, which behave differently with respect to Lua's automatic garbage collection:
  418. 1) Call the object's constructor:
  419. \code
  420. local context = GetContext()
  421. local scene = Scene(context)
  422. \endcode
  423. tolua++ will register this C++ object with garbage collection, and Lua will collect it eventually. Do not use this form if you will add the
  424. object to an object hierarchy that is kept alive on the C++ side with SharedPtr's, for example child scene nodes or %UI child elements.
  425. Otherwise the object will be double-deleted, resulting in a crash.
  426. 2) Call the new function:
  427. \code
  428. local context = GetContext()
  429. local text = Text:new(context)
  430. \endcode
  431. When using this form the object will not be collected by Lua, so it is safe to pass into C++ object hierarchies.
  432. Otherwise, to prevent memory leaks it needs to be deleted manually by calling the delete function on it:
  433. \code
  434. text:delete()
  435. \endcode
  436. When you call the \ref ResourceCache::GetFile "GetFile()" function of ResourceCache from Lua, the file you receive must also be manually deleted like described above once you are done with it.
  437. \page Rendering Rendering
  438. Much of the rendering functionality in Urho3D is built on two subsystems, Graphics and Renderer.
  439. \section Rendering_Graphics Graphics
  440. Graphics implements the low-level functionality:
  441. - Creating the window and the rendering context
  442. - Setting the screen mode
  443. - Keeping track of GPU resources
  444. - Keeping track of rendering context state (current rendertarget, vertex and index buffers, textures, shaders and renderstates)
  445. - Performing primitive rendering operations
  446. - Handling lost device
  447. Screen resolution, fullscreen/windowed, vertical sync and hardware multisampling level are all set at once by calling Graphics's \ref Graphics::SetMode "SetMode()" function. There is also an experimental option of rendering to an existing window by passing its OS-specific handle to \ref Graphics::SetExternalWindow "SetExternalWindow()" before setting the initial screen mode.
  448. When setting the initial screen mode, Graphics does a few checks:
  449. - For Direct3D9, the supported shader model is checked. 2 is minimum, but 3 will be used if available. SM2 can be forced by calling \ref Graphics::SetForceSM2 "SetForceSM2()" before setting the initial screen mode.
  450. - For OpenGL, version 2.0 with EXT_framebuffer_object, EXT_packed_depth_stencil and EXT_texture_filter_anisotropic extensions is checked for.
  451. - Is hardware instancing supported? This requires shader model 3 on Direct3D9 and the ARB_instanced_arrays extension on OpenGL.
  452. - Are hardware shadow maps supported? Both ATI & NVIDIA style shadow maps can be used. If neither are available, no shadows will be rendered.
  453. - Are light pre-pass and deferred rendering modes supported? These require sufficient multiple rendertarget support, and R32F texture format support.
  454. \section Rendering_Renderer Renderer
  455. Renderer implements the actual rendering of 3D views each frame, and controls global settings such as texture quality, material quality, specular lighting and shadow map base resolution.
  456. To render, it needs a Scene with an Octree component, and a Camera that does not necessarily have to belong to the scene. The octree stores all visible components (derived from Drawable) to allow querying for them in an accelerated manner. The needed information is collected in a Viewport object, which can be assigned with Renderer's \ref Renderer::SetViewport "SetViewport()" function.
  457. By default there is one viewport, but the amount can be increased with the function \ref Renderer::SetNumViewports "SetNumViewports()". The viewport(s) should cover the entire screen or otherwise hall-of-mirrors artifacts may occur. By specifying a zero screen rectangle the whole window will be used automatically. The viewports will be rendered in ascending order, so if you want for example to have a small overlay window on top of the main viewport, use viewport index 0 for the main view, and 1 for the overlay.
  458. Viewports can also be defined for rendertarget textures. See \ref AuxiliaryViews "Auxiliary views" for details.
  459. Each viewport defines a command sequence for rendering the scene, the \ref RenderPaths "render path". By default there exist forward, light pre-pass and deferred render paths in the Bin/CoreData/RenderPaths directory, see \ref Renderer::SetDefaultRenderPath "SetDefaultRenderPath()" to set the default for new viewports. If not overridden from the command line, forward rendering is the default. Deferred rendering modes will be advantageous once there is a large number of per-pixel lights affecting each object, but their disadvantages are the lack of hardware multisampling and inability to choose the lighting model per material. In place of multisample antialiasing, a FXAA post-processing edge filter can be used, see the MultipleViewports sample application (Bin/Data/Scripts/09_MultipleViewports.as) for an example of how to use.
  460. The steps for rendering each viewport on each frame are roughly the following:
  461. - Query the octree for visible objects and lights in the camera's view frustum.
  462. - Check the influence of each visible light on the objects. If the light casts shadows, query the octree for shadowcaster objects.
  463. - Construct render operations (batches) for the visible objects, according to the scene passes in the render path command sequence.
  464. - Perform the render path command sequence during the rendering step at the end of the frame.
  465. In the default render paths, the rendering operations proceed in the following order:
  466. - Opaque geometry ambient pass, or G-buffer pass in deferred rendering modes.
  467. - Opaque geometry per-pixel lighting passes. For shadow casting lights, the shadow map is rendered first.
  468. - (%Light pre-pass only) Opaque geometry material pass, which renders the objects with accumulated per-pixel lighting.
  469. - Pre-alpha rendering pass for custom render ordering such as the skybox.
  470. - Transparent geometry rendering pass. Transparent, alpha-blended objects are sorted according to distance and rendered back-to-front to ensure correct blending.
  471. - Post-alpha rendering pass.
  472. \section Rendering_Drawable Rendering components
  473. The rendering-related components defined by the %Graphics and %UI libraries are:
  474. - Octree: spatial partitioning of Drawables for accelerated visibility queries. Needs to be created to the Scene (root node.)
  475. - Camera: describes a viewpoint for rendering, including projection parameters (FOV, near/far distance, perspective/orthographic)
  476. - Drawable: Base class for anything visible.
  477. - StaticModel: non-skinned geometry. Can LOD transition according to distance.
  478. - StaticModelGroup: renders several object instances while culling and receiving light as one unit.
  479. - Skybox: a subclass of StaticModel that appears to always stay in place.
  480. - AnimatedModel: skinned geometry that can do skeletal and vertex morph animation.
  481. - AnimationController: drives animations forward automatically and controls animation fade-in/out.
  482. - BillboardSet: a group of camera-facing billboards, which can have varying sizes, rotations and texture coordinates.
  483. - ParticleEmitter: a subclass of BillboardSet that emits particle billboards.
  484. - Light: illuminates the scene. Can optionally cast shadows.
  485. - Terrain: renders heightmap terrain.
  486. - CustomGeometry: renders runtime-defined unindexed geometry. The geometry data is not serialized or replicated over the network.
  487. - DecalSet: renders decal geometry on top of objects.
  488. - Zone: defines ambient light and fog settings for objects inside the zone volume.
  489. - Text3D: text that is rendered into the 3D view.
  490. \section Rendering_Optimizations Optimizations
  491. The following techniques will be used to reduce the amount of CPU and GPU work when rendering. By default they are all on:
  492. - Software rasterized occlusion: after the octree has been queried for visible objects, the objects that are marked as occluders are rendered on the CPU to a small hierarchical-depth buffer, and it will be used to test the non-occluders for visibility. Use \ref Renderer::SetMaxOccluderTriangles "SetMaxOccluderTriangles()" and \ref Renderer::SetOccluderSizeThreshold "SetOccluderSizeThreshold()" to configure the occlusion rendering.
  493. - Hardware instancing: rendering operations with the same geometry, material and light will be grouped together and performed as one draw call. Objects with a large amount of triangles will not be rendered as instanced, as that could actually be detrimental to performance. Use \ref Renderer::SetMaxInstanceTriangles "SetMaxInstanceTriangles()" to set the threshold. Note that even when instancing is not available, or the triangle count of objects is too large, they still benefit from the grouping, as render state only needs to be set once before rendering each group, reducing the CPU cost.
  494. - %Light stencil masking: in forward rendering, before objects lit by a spot or point light are re-rendered additively, the light's bounding shape is rendered to the stencil buffer to ensure pixels outside the light range are not processed.
  495. Note that many more optimization opportunities are possible at the content level, for example using geometry & material LOD, grouping many static objects into one object for less draw calls, minimizing the amount of subgeometries (submeshes) per object for less draw calls, using texture atlases to avoid render state changes, using compressed (and smaller) textures, and setting maximum draw distances for objects, lights and shadows.
  496. \section Rendering_GPUResourceLoss Handling GPU resource loss
  497. On Direct3D9 and Android OpenGL ES 2.0 it is possible to lose the rendering context (and therefore GPU resources) due to the application window being minimized to the background. Also, to work around possible GPU driver bugs the desktop OpenGL context will be voluntarily destroyed and recreated when changing screen mode or toggling between fullscreen and windowed. Therefore, on all graphics APIs one must be prepared for losing GPU resources.
  498. Textures that have been loaded from a file, as well as vertex & index buffers that have shadowing enabled will restore their contents automatically, the rest have to be restored manually. On Direct3D9 non-dynamic (managed) textures and buffers will never be lost, as the runtime automatically backs them up to system memory.
  499. See \ref GPUObject::IsDataLost "IsDataLost()" function in VertexBuffer, IndexBuffer, Texture2D and TextureCube classes for detecting data loss. Inbuilt classes such as Model, BillboardSet and Font already handle data loss for their internal GPU resources, so checking for it is only necessary for custom buffers and textures. Watch out especially for trying to render with an index buffer that has uninitialized data after a loss, as this can cause a crash inside the GPU driver due to referencing non-existent (garbage) vertices.
  500. \section Rendering_Further Further details
  501. See also \ref Materials "Materials", \ref Shaders "Shaders", \ref Lights "Lights and shadows", \ref RenderPaths "Render path", \ref SkeletalAnimation "Skeletal animation", \ref Particles "Particle systems", \ref Zones "Zones", and \ref AuxiliaryViews "Auxiliary views".
  502. See \ref RenderingModes "Rendering modes" for detailed discussion on the forward, light pre-pass and deferred rendering modes.
  503. See \ref APIDifferences "Differences between Direct3D9 and OpenGL" for what to watch out for when using the low-level rendering functionality directly.
  504. \page RenderingModes Rendering modes
  505. The default render paths supplied with Urho3D implement forward, light pre-pass and deferred rendering modes. Where they differ is how per-pixel lighting is calculated for opaque objects; transparent objects always use forward rendering. Note that on OpenGL ES 2.0 only forward rendering is available.
  506. \section RenderingModes_Forward Forward rendering
  507. Forward rendering begins with an ambient light pass for the objects; this also adds any per-vertex lights. Then, the objects are re-rendered for each per-pixel light affecting them (basic multipass rendering), up to the maximum per-pixel light count which is by default unlimited, but can be reduced with \ref Drawable::SetMaxLights "SetMaxLights()". The render operations are sorted by light, ie. render the effect of the first light on all affected objects first, then the second etc. If shadow maps are re-used (default on), a shadow casting light's shadow map will be updated immediately before rendering the lit objects. When shadow maps are not re-used, all shadow maps are updated first even before drawing the ambient pass.
  508. Materials can also define an optimization pass for forward rendering where the ambient light and the first per-pixel light are combined. This pass can not be used, however, if there are per-vertex lights affecting the object, or if the ambient light has a per-vertex gradient.
  509. \section RenderingModes_Prepass Light pre-pass rendering
  510. %Light pre-pass requires a minimum of two passes per object. First the normal, specular power, depth and lightmask (8 low bits only) of opaque objects are rendered to the following G-buffer:
  511. - RT0: World-space normal and specular power (D3DFMT_A8R8G8B8)
  512. - RT1: Linear depth (D3DFMT_R32F)
  513. - DS: Hardware depth and lightmask (D3DFMT_D24S8)
  514. After the G-buffer is complete, light volumes (spot and point lights) or fullscreen quads (directional lights) will be rendered to a light accumulation buffer to calculate the diffuse and specular light at each opaque pixel. Specular light is stored as intensity only. Stencil compare (AND operation) with the 8 low bits of the light's lightmask will be used for light culling. Similarly to forward rendering, shadow maps will be updated before each light as necessary.
  515. Finally the opaque objects are re-rendered during the material pass, which combines ambient and vertex lighting with per-pixel lighting from the light accumulation buffer. After this rendering proceeds to the pre-alpha pass, transparent object rendering pass, and the post-alpha pass, just like forward rendering.
  516. \section RenderingModes_Deferred Deferred rendering
  517. Deferred rendering needs to render each opaque object only once to the G-buffer, but this rendering pass is much heavier than in light pre-pass rendering, as also ambient, emissive and diffuse albedo information is output at the same time. The G-buffer is the following:
  518. - RT0: Final rendertarget with ambient, per-vertex and emissive color (D3DFMT_X8R8G8B8)
  519. - RT1: Diffuse albedo and specular intensity (D3DFMT_A8R8G8B8)
  520. - RT2: World-space normal and specular power (D3DFMT_A8R8G8B8)
  521. - RT3: Linear depth (D3DFMT_R32F)
  522. - DS: Hardware depth and lightmask (D3DFMT_D24S8)
  523. After the G-buffer has been rendered, light volumes will be rendered into the final rendertarget to accumulate per-pixel lighting. As the material albedo is available, all lighting calculations are final and output both the diffuse and specular color at the same time. After light accumulation rendering proceeds to pre-alpha, transparent, and post-alpha passes, as in other rendering modes.
  524. \section RenderingModes_Comparision Advantages and disadvantages
  525. Whether forward or deferred rendering is more advantageous depends on the scene and lighting complexity.
  526. If the scene contains a large number of complex objects lit by multiple lights, forward rendering quickly increases the total draw call and vertex count due to re-rendering the objects for each light. However, light pre-pass and deferred rendering have a higher fixed cost due to the generation of the G-buffer. Also, in forward per-pixel lighting more calculations (such as light direction and shadow map coordinates) can be done at the vertex shader level, while in deferred all calculations need to happen per-pixel. This means that for a low light count, for example 1-2 per object, forward rendering will run faster based on the more efficient lighting calculations alone.
  527. Forward rendering makes it possible to use hardware multisampling and different shading models in different materials if needed, while neither is possible in the deferred modes. Also, only forward rendering allows calculating the material's diffuse and specular light response with the most accuracy. %Light pre-pass rendering needs to reconstruct light specular color from the accumulated diffuse light color, which is inaccurate in case of overlapping lights. Deferred rendering on the other hand can not use the material's full specular color; it only stores a monochromatic intensity based on the green component into the G-buffer.
  528. %Light pre-pass rendering has a much more lightweight G-buffer pass, but it must render all opaque geometry twice. %Light accumulation in pre-pass mode is slightly faster than in deferred. Despite this, unless there is significant overdraw, in vertex-heavy scenes deferred rendering will likely be faster than light pre-pass.
  529. Finally note that due to OpenGL framebuffer object limitations an extra framebuffer blit has to happen at the end in both light pre-pass and deferred rendering, which costs some performance. Also, because multiple rendertargets on OpenGL must have the same format, an R32F texture can not be used for linear depth, but instead 24-bit depth is manually encoded and decoded into RGB channels.
  530. \page APIDifferences Differences between Direct3D9 and OpenGL
  531. These differences need to be observed when using the low-level rendering functionality directly. The high-level rendering architecture, including the Renderer and UI subsystems and the Drawable subclasses already handle most of them transparently to the user.
  532. - The post-projection depth range is (0,1) for Direct3D9 and (-1,1) for OpenGL. The Camera can be queried either for an API-specific or API-independent (Direct3D9 convention) projection matrix.
  533. - To render with 1:1 texel-to-pixel mapping, on Direct3D9 UV coordinates have to be shifted a half-pixel to the right and down, or alternatively vertex positions can be shifted a half-pixel left and up.
  534. - On Direct3D9 the depth-stencil surface can be equal to or larger in size than the color rendertarget. On OpenGL the sizes must always match. Furthermore, OpenGL can not use the backbuffer depth-stencil surface when rendering to a texture. To overcome these limitations, Graphics will create correctly sized depth-stencil surfaces on demand whenever a texture is set as a color rendertarget, and a null depth-stencil is specified.
  535. - On Direct3D9 the viewport will be reset to full size when the first color rendertarget is changed. On OpenGL this does not happen. To ensure correct operation on both APIs, always use this sequence: first set the rendertargets, then the depth-stencil surface and finally the viewport.
  536. - On OpenGL modifying a texture will cause it to be momentarily set on the first texture unit. If another texture was set there, the assignment will be lost. Graphics performs a check to not assign textures redundantly, so it is safe and recommended to always set all needed textures before rendering.
  537. - Modifying an index buffer on OpenGL will similarly cause the existing index buffer assignment to be lost. Therefore, always set the vertex and index buffers before rendering.
  538. - %Shader resources are stored in different locations depending on the API: Bin/CoreData/Shaders/HLSL for Direct3D9, and Bin/CoreData/Shaders/GLSL for OpenGL.
  539. - To ensure similar UV addressing for render-to-texture viewports on both APIs, on OpenGL texture viewports will be rendered upside down.
  540. OpenGL ES 2.0 has further limitations:
  541. - Of the DXT formats, only DXT1 compressed textures will be uploaded as compressed, and only if the EXT_texture_compression_dxt1 extension is present. Other DXT formats will be uploaded as uncompressed RGBA. ETC1 (Android) and PVRTC (iOS) compressed textures are supported through the .ktx and .pvr file formats.
  542. - %Texture formats such as 16-bit and 32-bit floating point are not available. Corresponding integer 8-bit formats will be returned instead.
  543. - %Light pre-pass and deferred rendering are not supported due to missing multiple rendertarget support, and limited rendertarget formats.
  544. - Wireframe and point fill modes are not supported.
  545. - Due to texture unit limit (usually 8), point light shadow maps are not supported.
  546. - To reduce fillrate, the stencil buffer is not reserved and the stencil test is not available. As a consequence, the light stencil masking optimization is not used.
  547. - For improved performance, shadow mapping quality is reduced: there is no smooth PCF filtering and directional lights can have a maximum of 2 cascades.
  548. - Custom clip planes are not currently supported.
  549. \page Materials Materials
  550. Material and Technique resources define how to render 3D scene geometry. On the disk, they are XML data. By default, materials exist in the Bin/CoreData/Materials & Bin/Data/Materials subdirectories, and techniques exist in the Bin/CoreData/Techniques subdirectory.
  551. A material defines the textures, shader parameters and culling mode to use, and refers to techniques. A technique defines the actual rendering passes, the shaders to use in each, and all other rendering states such as depth test, depth write, and blending.
  552. A material definition looks like this:
  553. \code
  554. <material>
  555. <technique name="TechniqueName" quality="q" loddistance="d" sm3="true|false" />
  556. <texture unit="diffuse|normal|specular|emissive|environment" name="TextureName" />
  557. <texture ... />
  558. <parameter name="name" value="x y z w" />
  559. <parameter ... />
  560. <cull value="cw|ccw|none" />
  561. <shadowcull value="cw|ccw|none" />
  562. <depthbias constant="x" slopescaled="y" />
  563. </material>
  564. \endcode
  565. %Technique quality levels are specified from 0 (low) to 2 (high). When rendering, the highest available technique that does not exceed the Renderer's material quality setting will be chosen, see \ref Renderer::SetMaterialQuality "SetMaterialQuality()". If a technique requires SM3.0-only shaders, it can be marked as such by the "sm3" attribute.
  566. When a material defines several techniques for different LOD levels and quality settings, they must appear in a specific order:
  567. - Most distant & highest quality
  568. - ...
  569. - Most distant & lowest quality
  570. - Second most distant & highest quality
  571. - ...
  572. %Material shader parameters can be floats or vectors up to 4 components. Matrix parameters are not supported. Note that a built-in ElapsedTime shader parameter is available for implementing material animation effects; it measures the time elapsed in scene updates in seconds.
  573. Default culling mode is counterclockwise. The shadowcull element specifies the culling mode to use in the shadow pass. Note that material's depth bias settings do not apply in the shadow pass; during shadow rendering the light's depth bias is used instead.
  574. \section Materials_Textures Material textures
  575. Diffuse maps specify the surface color in the RGB channels. Optionally they can use the alpha channel for blending and alpha testing. They should preferably be compressed to DXT1 (no alpha or 1-bit alpha) or DXT5 (smooth alpha) format.
  576. Normal maps encode the tangent-space surface normal for normal mapping. There are two options for storing normals, which require choosing the correct material technique, as the pixel shader is different in each case:
  577. - Store as RGB. In this case use the DiffNormal techniques. This is the default used by AssetImporter, to ensure no conversion of normal textures needs to happen.
  578. - Store as xGxR, ie. Y-component in the green channel, and X-component in the alpha. In this case use the DiffNormalPacked techniques: Z will be reconstructed in the pixel shader. This encoding lends itself well to DXT5 compression. To convert normal maps to this format, you can use AMD's The Compressonator utility, see http://developer.amd.com/Resources/archive/ArchivedTools/gpu/compressonator/Pages/default.aspx
  579. Make sure the normal map is oriented correctly: an even surface should have the color value R 0.5 G 0.5 B 1.0.
  580. Specular maps encode the specular surface color as RGB. Note that deferred rendering is only able to use monochromatic specular intensity from the G channel, while forward and light pre-pass rendering use fully colored specular. DXT1 format should suit these textures well.
  581. Textures can have an accompanying XML file which specifies load-time parameters, such as addressing, mipmapping, and number of mip levels to skip on each quality level:
  582. \code
  583. <texture>
  584. <address coord="u|v|w" mode="wrap|mirror|clamp|border" />
  585. <border color="r g b a" />
  586. <filter mode="nearest|bilinear|trilinear|anisotropic|default" />
  587. <mipmap enable="false|true" />
  588. <quality low="x" medium="y" high="z" />
  589. <srgb enable="false|true" />
  590. </texture>
  591. \endcode
  592. The sRGB flag controls both whether the texture should be sampled with sRGB to linear conversion, and if used as a rendertarget, pixels should be converted back to sRGB when writing to it.
  593. To control whether the backbuffer should use sRGB conversion on write, call \ref Graphics::SetSRGB "SetSRGB()" on the Graphics subsystem.
  594. \section Materials_Techniques Techniques and passes
  595. A technique definition looks like this:
  596. \code
  597. <technique>
  598. <pass name="base|litbase|light|alpha|litalpha|postopaque|refract|postalpha|prepass|material|deferred|depth|shadow" vs="VertexShaderName" ps="PixelShaderName"
  599. lighting="unlit|pervertex|perpixel" alphatest="true|false" blend="replace|add|multiply|alpha|addalpha|premulalpha|invdestalpha"
  600. depthtest="always|equal|less|lessequal|greater|greaterequal" depthwrite="true|false" alphamask="true|false" />
  601. <pass ... />
  602. <pass ... />
  603. </technique>
  604. \endcode
  605. The purposes of the different passes are:
  606. - base: Renders ambient light, per-vertex lights and fog for an opaque object.
  607. - litbase: Renders the first per-pixel light, ambient light and fog for an opaque object. This is an optional pass for optimization.
  608. - light: Renders one per-pixel light's contribution additively for an opaque object.
  609. - alpha: Renders ambient light, per-vertex lights and fog for a transparent object.
  610. - litalpha: Renders one per-pixel light's contribution additively for a transparent object.
  611. - postopaque: Custom rendering pass after opaque geometry. Can be used to render the skybox.
  612. - refract: Custom rendering pass after postopaque pass. Can sample the viewport texture from the environment texture unit to render refractive objects.
  613. - postalpha: Custom rendering pass after transparent geometry.
  614. - prepass: %Light pre-pass only - renders normals, specular power and depth to the G-buffer.
  615. - material: %Light pre-pass only - renders opaque geometry final color by combining ambient light, per-vertex lights and per-pixel light accumulation.
  616. - deferred: Deferred rendering only - renders ambient light and per-vertex lights to the output rendertarget, and diffuse albedo, normals, specular intensity + power and depth to the G-buffer.
  617. - depth: Renders linear depth to a rendertarget for post-processing effects.
  618. - shadow: Renders to a hardware shadow map (depth only) for shadow map generation.
  619. More custom passes can be defined and referred to in the \ref RenderPaths "render path definition". For the built-in passes listed above, the lighting shader combinations to load (unlit, per-vertex or per-pixel) are recognized automatically, but for custom passes they need to be explicitly specified. The default is unlit.
  620. By default draw calls within passes are sorted by render state, but transparent base and light passes, as well as the postalpha pass, are sorted by distance back to front.
  621. Note that the technique does not need to enumerate shaders used for different geometry types (non-skinned, skinned, instanced, billboard) and different per-vertex and per-pixel light combinations. Instead specific hardcoded shader variations are assumed to exist. See \ref Shaders "Shaders" for details.
  622. The optional "litbase" pass reduces draw call count by combining ambient lighting with the first per-pixel light affecting an object. However, it has intentional limitations to not require too many shader permutations: there must be no vertex lights affecting the object, and the ambient lighting can not have a gradient. In case of excessive overdraw, it is possibly better not to define it, but instead allow the base pass (which is computationally very lightweight) to run first, initializing the Z buffer for later passes.
  623. "Alphamask" is not an actual rendering state, but a hint which tells that the pixel shader will use discard based on alpha. Because this may interfere with the early-Z culling, materials without the alpha masking hint will be drawn first.
  624. The refract pass requires pingponging the scene rendertarget to a texture, but this will not be performed if there is no refractive geometry to render, so there is no unnecessary cost to it.
  625. \page Shaders Shaders
  626. Urho3D uses an ubershader-like approach: permutations of each shader will be built with different defines, to produce eg. static or skinned, deferred or forward or shadowed/unshadowed rendering.
  627. The building of these variations is controlled by an XML shader definition file, which always accompanies the shader source code.
  628. For example, the Basic shader in Bin/CoreData/Shaders/HLSL has the following definition file:
  629. \code
  630. <shaders>
  631. <shader type="vs">
  632. <option name="Diff" define="DIFFMAP" />
  633. <option name="VCol" define="VERTEXCOLOR" />
  634. <variation name="" />
  635. <variation name="Skinned" define="SKINNED" />
  636. <variation name="Instanced" define="INSTANCED" require="SM3" />
  637. <variation name="Billboard" define="BILLBOARD" />
  638. </shader>
  639. <shader type="ps">
  640. <option name="Diff" define="DIFFMAP" exclude="Alpha" />
  641. <option name="Alpha" define="ALPHAMAP" exclude="Diff" />
  642. <option name="AlphaMask" define="ALPHAMASK" require="DIFFMAP" />
  643. <option name="VCol" define="VERTEXCOLOR" />
  644. </shader>
  645. </shaders>
  646. \endcode
  647. Permutations can be defined separately for both the vertex and the pixel shader. There exist two different mechanisms for adding permutations: a "variation" and an "option". A variation always excludes other variations within the same "variation group" and is most often used to define the geometry types for the vertex shader. In contrast, an
  648. option does not exclude other options by default.
  649. For both variations and options, a name is required, and one or more defines to be passed to the shader compiler. If there is only one define, it can be listed as an attribute of the permutation, as shown. If many defines are needed, they can be included as child elements of the permutation, eg. (from the LitSolid shader)
  650. \code
  651. <variation name="Dir">
  652. <define name="DIRLIGHT" />
  653. <define name="PERPIXEL" />
  654. </variation>
  655. \endcode
  656. A variation or option can "require" other defines, for example in HLSL shaders instancing requires the SM3 define which tells that we are compiling for %Shader %Model 3. Like defines, requires can be listed either as an attribute (if only one) or as a child element (if many.)
  657. Additionally, an option can "include" or "exclude" other options. Use this mechanism instead of variations if complex dependencies between options are required, rather than simple exclusion. Again, includes or excludes can be listed either as attributes or child elements.
  658. The final name of a shader permutation is formed by taking the shader's name, adding an underscore (if there are active variations/options) and then listing the names of all active variations/options in top-to-bottom order. For example the Basic vertex shader with vertex color and skinning active would be
  659. Basic_VColSkinned. This final name is used to request shaders from the Renderer subsystem.
  660. \section Shaders_Required Required shader permutations
  661. When rendering scene objects, the engine expects certain shader permutations to exist for different geometry types and lighting conditions. These must appear in the specific order listed below. Use the LitSolid shader for reference.
  662. Vertex shader:
  663. - 1VL, 2VL, 3VL, 4VL: number of vertex lights influencing the object
  664. - Dir, Spot, Point: a per-pixel forward light is being used
  665. - Spec: the per-pixel forward light has specular calculations
  666. - Shadow: the per-pixel forward light has shadowing
  667. - Skinned, Instanced, %Billboard: choosing the geometry type
  668. Pixel shader:
  669. - Dir, Spot, Point: a per-pixel forward light is being used
  670. - Mask: the point light has a cube map mask
  671. - Spec: the per-pixel forward light has specular calculations
  672. - Shadow: the per-pixel forward light has shadowing
  673. - LQ: use low-quality shadowing (1 hardware PCF sample instead of 4)
  674. - HW: use hardware shadow depth compare, Direct3D9 only
  675. \section Shaders_Writing Writing shaders
  676. Shaders must be written separately for HLSL (Direct3D9) and GLSL (OpenGL). The built-in shaders try to implement the same functionality on both shader languages as closely as possible.
  677. To get started with writing your own shaders, start with studying the most basic examples possible: the Basic, Shadow & Unlit shaders. Note the required include files which bring common functionality, for example Uniforms.hlsl, Samplers.hlsl & Transform.hlsl for HLSL shaders.
  678. Transforming the vertex (which hides the actual skinning, instancing or billboarding process) is a slight hack which uses a combination of macros and functions: it is safest to copy the following piece of code verbatim:
  679. For HLSL:
  680. \code
  681. float4x3 modelMatrix = iModelMatrix;
  682. float3 worldPos = GetWorldPos(modelMatrix);
  683. oPos = GetClipPos(worldPos);
  684. \endcode
  685. For GLSL:
  686. \code
  687. mat4 modelMatrix = iModelMatrix;
  688. vec3 worldPos = GetWorldPos(modelMatrix);
  689. gl_Position = GetClipPos(worldPos);
  690. \endcode
  691. Note that for HLSL shaders both the vertex and the pixel shader are written into the same file, and the functions must be called VS() and PS(), while for GLSL they are put into separate .vert and .frag files, and are called main().
  692. The uniforms must be prefixed in a certain way so that the shader compiler and the engine understand them:
  693. - c for uniform constants, for example cMatDiffColor. The c is stripped when referred to inside the engine, so it would be called "MatDiffColor" in eg. \ref Material::SetShaderParameter "SetShaderParameter()"
  694. - s for texture samplers, for example sDiffMap.
  695. In GLSL shaders it is important that the samplers are assigned to the correct texture units. If you are using sampler names that are not predefined in the engine like sDiffMap, just make sure there is a number somewhere in the sampler's name and it will be interpreted as the texture unit. For example the terrain shader uses texture units 0-3 in the following way:
  696. \code
  697. uniform sampler2D sWeightMap0;
  698. uniform sampler2D sDetailMap1;
  699. uniform sampler2D sDetailMap2;
  700. uniform sampler2D sDetailMap3;
  701. \endcode
  702. \page RenderPaths Render path
  703. %Scene rendering and any post-processing on a Viewport is defined by its RenderPath object, which can either be read from an XML file or be created programmatically.
  704. The render path consists of rendertarget definitions and commands. The commands are executed in order to yield the rendering result. Each command outputs either to the destination rendertarget & viewport (default if output definition is omitted), or one of the named rendertargets. MRT output is also possible.
  705. A rendertarget's size can be either absolute, divide the destination viewport size, or divide the destination rendertarget size. For an example of rendertarget definitions, see the inbuilt bloom effect in Bin/Data/PostProcess/Bloom.xml.
  706. The available commands are:
  707. - clear: Clear any of color, depth and stencil. Color clear can optionally use the fog color from the Zone visible at the far clip distance.
  708. - scenepass: Render scene objects whose \ref Materials "material technique" contains the specified pass. Will either be front-to-back ordered with state sorting, or back-to-front ordered with no state sorting. For deferred rendering, object lightmasks can be optionally marked to the stencil buffer. Vertex lights can optionally be handled during a pass, if it has the necessary shader combinations. Textures global to the pass can be bound to free texture units; these can either be the viewport, a named rendertarget, or a 2D texture resource identified with its pathname.
  709. - quad: Render a viewport-sized quad using the specified shaders. Textures can be bound and additionally shader parameters can be specified.
  710. - forwardlights: Render per-pixel forward lighting for opaque objects with the specified pass name. Shadow maps are also rendered as necessary.
  711. - lightvolumes: Render deferred light volumes using the specified shaders. G-buffer textures can be bound as necessary.
  712. A render path can be loaded from a main XML file by calling \ref RenderPath::Load "Load()", after which other XML files (for example one for each post-processing effect) can be appended to it by calling \ref RenderPath::Append "Append()". Rendertargets and commands can be enabled or disabled by calling \ref RenderPath::SetEnabled "SetEnabled()" to switch eg. a post-processing effect on or off. To aid in this, both can be identified by tag names, for example the bloom effect uses the tag "Bloom" for all of its rendertargets and commands.
  713. It is legal to both write to the destination viewport and sample from it during the same command: pingpong copies of its contents will be made automatically. If the viewport has hardware multisampling on, the multisampled backbuffer will be resolved to a texture before the first command which samples it.
  714. The render path XML definition looks like this:
  715. \code
  716. <renderpath>
  717. <rendertarget name="RTName" tag="TagName" enabled="true|false" size="x y"|sizedivisor="x y"|rtsizedivisor="x y"
  718. format="rgb|rgba|r32f|rgba16|rgba16f|rgba32f|rg16|rg16f|rg32f|lineardepth" filter="true|false" srgb="true|false" />
  719. <command type="clear" tag="TagName" enabled="true|false" clearcolor="r g b a|fog" cleardepth="x" clearstencil="y" output="viewport|RTName" />
  720. <command type="scenepass" pass="PassName" sort="fronttoback|backtofront" marktostencil="true|false" usescissor="true|false" vertexlights="true|false" metadata="base|alpha|gbuffer" >
  721. <output index="0" name="RTName1" />
  722. <output index="1" name="RTName2" />
  723. <output index="2" name="RTName3" />
  724. <texture unit="unit" name="viewport|RTName|TextureName" />
  725. </command>
726. <command type="quad" vs="VertexShaderName" ps="PixelShaderName" output="viewport|RTName" >
  727. <texture unit="unit" name="viewport|RTName|TextureName" />
  728. <parameter name="ParameterName" value="x y z w" />
  729. </command>
  730. <command type="forwardlights" pass="PassName" uselitbase="true|false" output="viewport|RTName" />
731. <command type="lightvolumes" vs="VertexShaderName" ps="PixelShaderName" output="viewport|RTName" >
  732. <texture unit="unit" name="viewport|RTName|TextureName" />
  733. </command>
  734. </renderpath>
  735. \endcode
  736. Note the special "lineardepth" format available for rendertargets. This is intended for storing scene depth in deferred rendering. It will be D3DFMT_R32F on Direct3D9, but RGBA on OpenGL, due to the limitation of all color buffers having to be the same format. The file Samplers.frag in Bin/CoreData/Shaders/GLSL provides functions to encode and decode linear depth to RGB.
  737. \section RenderPaths_ForwardLighting Forward lighting special considerations
  738. Otherwise fully customized scene render passes can be specified, but there are a few things to remember related to forward lighting:
  739. - The opaque base pass must be tagged with metadata "base". When forward lighting logic does the lit base pass optimization, it will search for a pass with the word "lit" prepended, ie. if your custom opaque base pass is
  740. called "custombase", the corresponding lit base pass would be "litcustombase".
  741. - The transparent base pass must be tagged with metadata "alpha". For lit transparent objects, the forward lighting logic will look for a pass with the word "lit" prepended, ie. if the custom alpha base pass is called "customalpha", the corresponding lit pass is "litcustomalpha". The lit drawcalls will be interleaved with the transparent base pass, and the scenepass command should have back-to-front sorting enabled.
  742. - If forward and deferred lighting are mixed, the G-buffer writing pass must be tagged with metadata "gbuffer" to prevent geometry being double-lit also with forward lights.
  743. - Remember to mark the lighting mode (per-vertex / per-pixel) into the techniques which define custom passes, as the lighting mode can be guessed automatically only for the known default passes.
  744. - The forwardlights command can optionally disable the lit base pass optimization without having to touch the material techniques, if a separate opaque ambient-only base pass is needed. By default the optimization is enabled.
  745. \section RenderPaths_PostProcess Post-processing effects special considerations
  746. Post-processing effects are usually implemented by using the quad command. When using intermediate rendertargets that are of different size than the viewport rendertarget, it is necessary in shaders to reference their (inverse) size and the half-pixel offset for Direct3D9. These shader uniforms are automatically generated for named rendertargets. For an example look at the bloom postprocess shaders: the rendertarget called HBlur will define the shader uniforms cHBlurInvSize and cHBlurOffsets (both Vector2.)
  747. In OpenGL post-processing shaders it is important to distinguish between sampling a rendertarget texture and a regular texture resource, because intermediate rendertargets (such as the G-buffer) may be vertically inverted. Use the GetScreenPos() or GetQuadTexCoord() functions to get rendertarget UV coordinates from the clip coordinates; this takes flipping into account automatically. For sampling a regular texture, use GetQuadTexCoordNoFlip() function, which requires world coordinates instead of clip coordinates.
  748. \page Lights Lights and shadows
  749. Lights in Urho3D can be directional, point, or spot lights. Shadow mapping is supported for all light types.
  750. A directional light's position has no effect, as it's assumed to be infinitely far away, only its rotation matters. It casts orthographically projected shadows. For increasing the shadow quality, cascaded shadow mapping (splitting the view into several shadow maps along the Z-axis) can be used.
  751. Point lights are spherical in shape. When a point light casts shadows, it will be internally split into 6 spot lights with a 90 degree FOV each. This is very expensive rendering-wise, so shadow casting point lights should be used sparingly.
  752. Spot lights have FOV & aspect ratio values like cameras to define the shape of the light cone.
  753. Both point and spot lights use an attenuation ramp texture to determine how the intensity varies with distance. In addition they have a shape texture, 2D for spot lights, and an optional cube texture for point lights. It is important that the spot light's shape texture has black at the borders, and has mipmapping disabled, otherwise there will be "bleeding" artifacts at the edges of the light cone.
  754. \section Lights_LightCulling Light culling
  755. When occlusion is used, a light will automatically be culled if its bounding box is fully behind an occluder. However, directional lights have an infinite bounding box, and can not be culled this way.
  756. It is possible to limit which objects are affected by each light, by calling \ref Drawable::SetLightMask "SetLightMask()" on both the light and the objects. The lightmasks of the light and objects are ANDed to check whether the light should have effect: the light will only illuminate an object if the result is nonzero. By default objects and lights have all bits set in their lightmask, thus passing this test always.
  757. \ref Zone "Zones" can also be used for light culling. When an object is inside a zone, its lightmask will be ANDed with the zone's lightmask before testing it against the lights' lightmasks. Using this mechanism, objects can change their accepted light set dynamically as they move through the scene.
758. Care must be taken when doing light culling with lightmasks, because they easily create situations where a light's influence is cut off unnaturally. However, they can be helpful in preventing light spill into undesired areas, for example lights inside one room bleeding into another, without having to resort to shadow-casting lights.
  759. In light pre-pass and deferred rendering, light culling happens by writing the objects' lightmasks to the stencil buffer during G-buffer rendering, and comparing the stencil buffer to the light's light mask when rendering light volumes. In this case lightmasks are limited to the low 8 bits only.
  760. \section Lights_ShadowedLights Shadowed lights
  761. Shadow rendering is easily the most complex aspect of using lights, and therefore a wide range of per-light parameters exists for controlling the shadows:
  762. - BiasParameters: define constant and slope-scaled depth bias values for preventing self-shadowing artifacts. In practice, need to be determined experimentally. Orthographic (directional) and projective (point and spot) shadows may require rather different bias values. Another way of fighting self-shadowing issues is to render shadowcaster backfaces, see \ref Rendering_Materials "Materials".
  763. - CascadeParameters: these have effect only for directional lights. They specify the far clip distance of each of the cascaded shadow map splits (maximum 4), and the fade start point relative to the maximum shadow range. Unused splits can be set to far clip 0.
  764. - FocusParameters: these have effect for directional and spot lights, and control techniques to increase shadow map resolution. They consist of focus enable flag (allows focusing the shadow camera on the visible shadow casters & receivers), nonuniform scale enable flag (allows better resolution), automatic size reduction flag (reduces shadow map resolution when the light is far away), and quantization & minimum size parameters for the shadow camera view.
  765. Additionally there are shadow fade distance, shadow intensity, shadow resolution and shadow near/far ratio parameters:
  766. - If both shadow distance and shadow fade distance are greater than zero, shadows start to fade at the shadow fade distance, and vanish completely at the shadow distance.
  767. - Shadow intensity defines how dark the shadows are, between 0.0 (maximum darkness, the default) and 1.0 (fully lit.)
  768. - The shadow resolution parameter scales the global shadow map size set in Renderer to determine the actual shadow map size. Maximum is 1.0 (full size) and minimum is 0.125 (one eighth size.) Choose according to the size and importance of the light; smaller shadow maps will be less performance hungry.
  769. - The shadow near/far ratio controls shadow camera near clip distance for point & spot lights. The default ratio is 0.002, which means a light with range 100 would have its shadow camera near plane set at the distance of 0.2. Set this as high as you can for better shadow depth resolution, but note that the bias parameters will likely have to be adjusted as well.
  770. Finally, there are global settings for the shadow map base resolution and shadow map depth (16 or 24 bit) & filtering quality (1 or 4 samples) in Renderer.
  771. \section Lights_ShadowCulling Shadow culling
  772. Similarly to light culling with lightmasks, shadowmasks can be used to select which objects should cast shadows with respect to each light. See \ref Drawable::SetShadowMask "SetShadowMask()". A potential shadow caster's shadow mask will be ANDed with the light's lightmask to see if it should be rendered to the light's shadow map. Also, when an object is inside a zone, its shadowmask will be ANDed with the zone's shadowmask as well. By default all bits are set in the shadowmask.
  773. For an example of shadow culling, imagine a house (which itself is a shadow caster) containing several objects inside, and a shadowed directional light shining in from the windows. In that case shadow map rendering can be avoided for objects already in shadow by clearing the respective bit from their shadowmasks.
  774. \section Lights_ShadowMapReuse Shadow map reuse
  775. The Renderer can be configured to either reuse shadow maps, or not. To reuse is the default, use \ref Renderer::SetReuseShadowMaps "SetReuseShadowMaps()" to change.
  776. When reuse is enabled, only one shadow texture of each shadow map size needs to be reserved, and shadow maps are rendered "on the fly" before rendering a single shadowed light's contribution onto opaque geometry. This has the downside that shadow maps are no longer available during transparent geometry rendering, so transparent objects will not receive shadows.
  777. When reuse is disabled, all shadow maps are rendered before the actual scene rendering. Now multiple shadow textures need to be reserved based on the number of simultaneous shadow casting lights. See the function \ref Renderer::SetNumShadowMaps "SetNumShadowMaps()". If there are not enough shadow textures, they will be assigned to the closest/brightest lights, and the rest will be rendered unshadowed. Now more texture memory is needed, but the advantage is that also transparent objects can receive shadows.
  778. \page SkeletalAnimation Skeletal animation
  779. The AnimatedModel component renders GPU-skinned geometry and is capable of skeletal animation. When a model is assigned to it using \ref AnimatedModel::SetModel "SetModel()", it creates a bone node hierarchy under its scene node, and these bone nodes can be moved and rotated to animate.
  780. There are two ways to play skeletal animations:
  781. - Manually, by adding or removing animation states to the AnimatedModel, and advancing their time positions & weights, see \ref AnimatedModel::AddAnimationState "AddAnimationState()", \ref AnimatedModel::RemoveAnimationState "RemoveAnimationState()", \ref AnimationState::AddTime "AddTime()" and \ref AnimationState::SetWeight "SetWeight()".
  782. - Using the AnimationController helper component: create it into the same scene node as the AnimatedModel, and use its functions, such as \ref AnimationController::Play "Play()" and \ref AnimationController::Stop "Stop()". AnimationController will advance the animations automatically during scene update. It also enables automatic network synchronization of animations, which the AnimatedModel does not do on its own.
  783. Note that AnimationController does not by default stop non-looping animations automatically once they reach the end, so their final pose will stay in effect. Rather they must either be stopped manually, or the \ref AnimationController::SetAutoFade "SetAutoFade()" function can be used to make them automatically fade out once reaching the end.
  784. \section SkeletalAnimation_Blending Animation blending
  785. %Animation blending uses the concept of numbered layers. Layer numbers are unsigned 8-bit integers, and the active \ref AnimationState "AnimationStates" on each layer are processed in order from the lowest layer to the highest. As animations are applied by lerp-blending between absolute bone transforms, the effect is that the higher layer numbers have higher priority, as they will remain in effect last.
  786. By default an Animation is played back by using all the available bone tracks. However an animation can be only partially applied by setting a start bone, see \ref AnimationState::SetStartBone "SetStartBone()". Once set, the bone tracks will be applied hierarchically starting from the start bone. For example, to apply an animation only to a bipedal character's upper body, which is typically parented to the spine bone, one could set the spine as the start bone.
  787. \section SkeletalAnimation_Triggers Animation triggers
  788. Animations can be accompanied with trigger data that contains timestamped Variant data to be interpreted by the application. This trigger data is in XML format next to the animation file itself. When an animation contains triggers, the AnimatedModel's scene node sends the E_ANIMATIONTRIGGER event each time a trigger point is crossed. The event data contains the timestamp, the animation name, and the variant data. Triggers will fire when the animation is advanced using \ref AnimationState::AddTime "AddTime()", but not when setting the absolute animation time position.
  789. The trigger data definition is below. Either normalized (0 = animation start, 1 = animation end) or non-normalized (time in seconds) timestamps can be used. See Bin/Data/Models/Ninja_Walk.xml and Bin/Data/Models/Ninja_Stealth.xml for examples; NinjaSnowWar implements footstep particle effects using animation triggers.
  790. \code
  791. <animation>
  792. <trigger time="t" normalizedtime="t" type="Int|Bool|Float|String..." value="x" />
  793. <trigger ... />
  794. </animation>
  795. \endcode
  796. \section SkeletalAnimation_ManualControl Manual bone control
797. By default an AnimatedModel's bone nodes are reset on each frame, after which all active animation states are applied to the bones. This mechanism can be turned off on a per-bone basis to allow manual bone control. To do this, query a bone from the AnimatedModel's skeleton and set its \ref Bone::animated_ "animated_" member variable to false. For example:
  798. \code
  799. Bone* headBone = model->GetSkeleton().GetBone("Bip01_Head");
  800. if (headBone)
  801. headBone->animated_ = false;
  802. \endcode
  803. \section SkeletalAnimation_CombinedModels Combined skinned models
  804. To create a combined skinned model from many parts (for example body + clothes), several AnimatedModel components can be created to the same scene node. These will then share the same bone nodes. The component that was first created will be the "master" model which drives the animations; the rest of the models will just skin themselves using the same bones. For this to work, all parts must have been authored from a compatible skeleton, with the same bone names. The master model should have all the bones required by the combined whole (for example a full biped), while the other models may omit unnecessary bones. Note that if the parts contain compatible vertex morphs (matching names), the vertex morph weights will also be controlled by the master model and copied to the rest.
  805. \section SkeletalAnimation_NodeAnimation Node animations
806. Animations can also be applied outside of an AnimatedModel's bone hierarchy, to control the transforms of named nodes in the scene. The AssetImporter utility will automatically save node animations in both model and scene modes to the output file directory.
  807. Like with skeletal animations, there are two ways to play back node animations:
  808. - Instantiate an AnimationState yourself, using the constructor which takes a root scene node (animated nodes are searched for as children of this node) and an animation pointer. You need to manually advance its time position, and then call \ref AnimationState::Apply "Apply()" to apply to the scene nodes.
  809. - Create an AnimationController component to the root scene node of the animation. This node should not contain an AnimatedModel component. Use the AnimationController to play back the animation just like you would play back a skeletal animation.
  810. %Node animations do not support blending, as there is no initial pose to blend from. Instead they are always played back with full weight. Note that the scene node names in the animation and in the scene must match exactly, otherwise the animation will not play.
  811. \page Particles %Particle systems
  812. The ParticleEmitter class derives from BillboardSet to implement a particle system that updates automatically.
813. The particle system's properties can also be set through an XML description file, see \ref ParticleEmitter::Load "Load()".
  814. Most of the parameters can take either a single value, or minimum and maximum values to allow for random variation. See below for all supported parameters:
  815. \code
  816. <particleemitter>
  817. <material name="MaterialName" />
  818. <updateinvisible enable="true|false" />
  819. <relative enable="true|false" />
  820. <scaled enable="true|false" />
  821. <sorted enable="true|false" />
  822. <emittertype value="sphere|box" />
  823. <emittersize value="x y z" />
  824. <emitterradius value="x" />
  825. <direction min="x1 y1 z1" max="x2 y2 z2" />
  826. <constantforce value="x y z" />
  827. <dampingforce value="x" />
  828. <activetime value="t" />
  829. <inactivetime value="t" />
  830. <interval min="t1" max="t2" />
  831. <emissionrate min="t1" max="t2" />
  832. <particlesize min="x1 y1" max="x2 y2" />
  833. <timetolive min="t1" max="t2" />
  834. <velocity min="x1" max="x2" />
  835. <rotation min="x1" max="x2" />
  836. <rotationspeed min="x1" max="x2" />
  837. <sizedelta add="x" mul="y" />
  838. <color value="r g b a" />
  839. <colorfade color="r g b a" time="t" />
  840. <texanim uv="u1 v1 u2 v2" time="t" />
  841. </particleemitter>
  842. \endcode
  843. Notes:
  844. - Zero active or inactive time period means infinite.
  845. - Interval is the reciprocal of emission rate. Either can be used to define the rate at which new particles are emitted.
  846. - Instead of defining a single color element, several colorfade elements can be defined in time order to describe how the particles change color over time.
  847. - Use several texanim elements to define a texture animation for the particles.
  848. \page Zones Zones
  849. A Zone controls ambient lighting and fogging. Each geometry object determines the zone it is inside (by testing against the zone's oriented bounding box) and uses that zone's ambient light color, fog color and fog start/end distance for rendering. For the case of multiple overlapping zones, zones also have an integer priority value, and objects will choose the highest priority zone they touch.
  850. The viewport will be initially cleared to the fog color of the zone found at the camera's far clip distance. If no zone is found either for the far clip or an object, a default zone with black ambient and fog color will be used.
  851. Zones have two special flags: override mode and ambient gradient. If the camera is inside a zone with override mode enabled, all rendered objects will use that zone's ambient and fog settings, instead of the zone they belong to. This can be used for example to implement an underwater effect. When ambient gradient mode is enabled, the zone's own ambient color value is not used, but instead it will look for two highest-priority neighbor zones that touch it at the minimum and maximum Z face of its oriented bounding box: any objects inside will then get a per-vertex ambient color fade between the neighbor zones' ambient colors.
  852. Zones also define a lightmask and a shadowmask (with all bits set by default.) An object's final lightmask for light culling is determined by ANDing the object lightmask and the zone lightmask. The final shadowmask is also calculated in the same way.
  853. \page AuxiliaryViews Auxiliary views
  854. Auxiliary views are viewports assigned to a RenderSurface by calling its \ref RenderSurface::SetViewport "SetViewport()" function. By default these will be rendered on each frame that the texture containing the surface is visible, and can be typically used to implement for example camera displays or reflections. The texture in question must have been created in rendertarget mode, see Texture's \ref Texture2D::SetSize "SetSize()" function.
  855. The viewport is not assigned directly to the texture because of cube map support: a renderable cube map has 6 render surfaces, and done this way, a different camera could be assigned to each.
  856. A "backup texture" can be assigned to the rendertarget texture: because it is illegal to sample a texture that is also being simultaneously rendered to (in cases where the texture becomes "recursively" visible in the auxiliary view), the backup texture can be used to specify which texture should be used in place instead.
  857. Rendering detailed auxiliary views can easily have a large performance impact. Some things you can do for optimization with the auxiliary view camera:
  858. - Set the far clip distance as small as possible.
  859. - Use viewmasks on the camera and the scene objects to only render some of the objects in the auxiliary view.
  860. - Use the camera's \ref Camera::SetViewOverrideFlags "SetViewOverrideFlags()" function to disable shadows, to disable occlusion, or force the lowest material quality.
  861. The surface can also be configured to always update its viewports, or to only update when manually requested. See \ref RenderSurface::SetUpdateMode "SetUpdateMode()". For example an editor widget showing a rendered texture might use either of those modes. Call \ref RenderSurface::QueueUpdate "QueueUpdate()" to request a manual update of the surface on the current frame.
  862. \page Input %Input
  863. The Input subsystem provides keyboard, mouse, joystick and touch input via both a polled interface and events. It is always instantiated, even in headless mode, but is active only once the application window has been created. Once active, the subsystem takes over the operating system mouse cursor. It will be hidden by default, so the UI should be used to render a software cursor if necessary. For editor-like applications the operating system cursor can be made visible by calling \ref Input::SetMouseVisible "SetMouseVisible()".
  864. The input events include:
  865. - E_MOUSEBUTTONUP: a mouse button was released.
  866. - E_MOUSEBUTTONDOWN: a mouse button was pressed.
  867. - E_MOUSEMOVE: the mouse moved.
  868. - E_MOUSEWHEEL: the mouse wheel moved.
  869. - E_KEYUP: a key was released.
  870. - E_KEYDOWN: a key was pressed.
  871. - E_CHAR: translation of a keypress to Unicode charset for text entry. This is currently the only way to get translated key input.
  872. - E_TOUCHBEGIN: a finger touched the screen.
  873. - E_TOUCHEND: a finger was lifted from the screen.
  874. - E_TOUCHMOVE: a finger moved on the screen.
  875. - E_JOYSTICKBUTTONDOWN: a joystick button was pressed.
  876. - E_JOYSTICKBUTTONUP: a joystick button was released.
  877. - E_JOYSTICKAXISMOVE: a joystick axis was moved.
  878. - E_JOYSTICKHATMOVE: a joystick POV hat was moved.
  879. - E_CONTROLLERBUTTONDOWN: a joystick button on an SDL controller was pressed.
  880. - E_CONTROLLERBUTTONUP: a joystick button on an SDL controller was released.
  881. - E_CONTROLLERAXISMOVE: a joystick axis on an SDL controller was moved.
  882. The input polling API differentiates between the initiation of a key/mouse button press, and holding the key or button down. \ref Input::GetKeyPress "GetKeyPress()" and \ref Input::GetMouseButtonPress "GetMouseButtonPress()" return true only for one frame (the initiation) while \ref Input::GetKeyDown "GetKeyDown()" and \ref Input::GetMouseButtonDown "GetMouseButtonDown()" return true as long as the key or button is held down.
  883. In script, the polling API is accessed via properties: input.keyDown[], input.keyPress[], input.mouseButtonDown[], input.mouseButtonPress[], input.mouseMove.
  884. To get joystick input, the joystick(s) must first be explicitly opened using \ref Input::OpenJoystick "OpenJoystick()". Accessing the polled joystick state using \ref Input::GetJoystick "GetJoystick()" also automatically opens the joystick. The plugged in joysticks are detected on application start and must be manually redetected using \ref Input::DetectJoysticks "DetectJoysticks()" if they are plugged in or disconnected during runtime.
  885. If the joystick model is recognized by SDL you will also get the "controller" variety of events shown above, and the buttons and axes mappings utilize known constants such as CONTROLLER_BUTTON_A or CONTROLLER_AXIS_LEFTX without having to guess them.
  886. From the input subsystem you can also query whether the application window has input focus, or is minimized.
  887. On platforms that support it (such as Android) an on-screen virtual keyboard can be shown or hidden. When shown, keypresses from the virtual keyboard will be sent as char events just as if typed from an actual keyboard. Show or hide it by calling \ref Input::SetScreenKeyboardVisible "SetScreenKeyboardVisible()". The UI subsystem can also automatically show the virtual keyboard when a LineEdit element is focused, and hide it when defocused. This behavior can be controlled by calling \ref UI::SetUseScreenKeyboard "SetUseScreenKeyboard()".
  888. \page Audio %Audio
  889. The Audio subsystem implements an audio output stream. Once it has been initialized, the following operations are supported:
  890. - Playing raw audio, Ogg Vorbis or WAV Sound resources using the SoundSource component. This allows manual stereo panning of mono sounds; stereo sounds will be output with their original stereo mix.
  891. - Playing the above sound formats in pseudo-3D using the SoundSource3D component. It has stereo positioning and distance attenuation, but does not (at least yet) filter the sound depending on the direction.
  892. To hear pseudo-3D positional sounds, a SoundListener component must exist in a scene node and be assigned to the audio subsystem by calling \ref Audio::SetListener "SetListener()". If the sound listener's scene node exists within a specific scene, it will only hear sounds from that scene, but if it has been created into a "sceneless" node it will hear sounds from all scenes.
  893. The output is software mixed for an unlimited amount of simultaneous sounds. Ogg Vorbis sounds are decoded on the fly, and decoding them can be memory- and CPU-intensive, so WAV files are recommended when a large number of short sound effects need to be played.
  894. For purposes of volume control, each SoundSource is classified into one of four categories:
  895. - %Sound effects
  896. - Ambient
  897. - Music
  898. - Voice
  899. A master gain category also exists that affects the final output level. To control the category volumes, use \ref Audio::SetMasterGain "SetMasterGain()".
  900. The SoundSource components support automatic removal from the node they belong to, once playback is finished. To use, call \ref SoundSource::SetAutoRemove "SetAutoRemove()" on them. This may be useful when a game object plays several "fire and forget" sound effects.
  901. \section Audio_Parameters Sound parameters
902. A standard WAV file can not tell whether it should loop, and raw audio does not contain any header information. Parameters for the Sound resource can optionally be specified through an XML file that has the same name as the sound, but with the .xml extension. Possible elements and attributes are described below:
  903. \code
  904. <sound>
  905. <format frequency="x" sixteenbit="true|false" stereo="true|false" />
  906. <loop enable="true|false" start="x" end="x" />
  907. </sound>
  908. \endcode
  909. The frequency is in Hz, and loop start and end are bytes from the start of audio data. If a loop is enabled without specifying the start and end, it is assumed to be the whole sound. Ogg Vorbis compressed sounds do not support specifying the loop range, only whether whole sound looping is enabled or disabled.
  910. The Audio subsystem is always instantiated, but in headless mode it is not active. In headless mode the playback of sounds is simulated, taking the sound length and frequency into account. This allows basing logic on whether a specific sound is still playing or not, even in server code.
  911. \page Physics Physics
  912. Urho3D implements rigid body physics simulation using the Bullet library.
  913. To use, a PhysicsWorld component must first be created to the Scene.
  914. The physics simulation has its own fixed update rate, which by default is 60Hz. When the rendering framerate is higher than the physics update rate, physics motion is interpolated so that it always appears smooth. The update rate can be changed with \ref PhysicsWorld::SetFps "SetFps()" function. The physics update rate also determines the frequency of fixed timestep scene logic updates.
  915. The other physics components are:
  916. - RigidBody: a physics object instance. Its parameters include mass, linear/angular velocities, friction and restitution.
  917. - CollisionShape: defines physics collision geometry. The supported shapes are box, sphere, cylinder, capsule, cone, triangle mesh, convex hull and heightfield terrain (requires the Terrain component in the same node.)
  918. - Constraint: connects two RigidBodies together, or one RigidBody to a static point in the world. Point, hinge, slider and cone twist constraints are supported.
  919. \section Physics_Movement Movement and collision
  920. Both a RigidBody and at least one CollisionShape component must exist in a scene node for it to behave physically (a collision shape by itself does nothing.) Several collision shapes may exist in the same node to create compound shapes. An offset position and rotation relative to the node's transform can be specified for each. Triangle mesh and convex hull geometries require specifying a Model resource and the LOD level to use.
  921. CollisionShape provides two APIs for defining the collision geometry. Either setting individual properties such as the \ref CollisionShape::SetShapeType "shape type" or \ref CollisionShape::SetSize "size", or specifying both the shape type and all its properties at once: see for example \ref CollisionShape::SetBox "SetBox()", \ref CollisionShape::SetCapsule "SetCapsule()" or \ref CollisionShape::SetTriangleMesh "SetTriangleMesh()".
  922. RigidBodies can be either static or moving. A body is static if its mass is 0, and moving if the mass is greater than 0. Note that the triangle mesh collision shape is not supported for moving objects; it will not collide properly due to limitations in the Bullet library. In this case the convex hull shape can be used instead.
  923. The collision behaviour of a rigid body is controlled by several variables. First, the collision layer and mask define which other objects to collide with: see \ref RigidBody::SetCollisionLayer "SetCollisionLayer()" and \ref RigidBody::SetCollisionMask "SetCollisionMask()". By default a rigid body is on layer 1; the layer will be ANDed with the other body's collision mask to see if the collision should be reported. A rigid body can also be set to \ref RigidBody::SetPhantom "phantom mode" to only report collisions without actually applying collision forces. This can be used to implement trigger areas. Finally, the \ref RigidBody::SetFriction "friction", \ref RigidBody::SetRollingFriction "rolling friction" and \ref RigidBody::SetRestitution "restitution" coefficients (between 0 - 1) control how kinetic energy is transferred in the collisions. Note that rolling friction is by default zero, and if you want for example a sphere rolling on the floor to eventually stop, you need to set a non-zero rolling friction on both the sphere and floor rigid bodies.
  924. By default rigid bodies can move and rotate about all 3 coordinate axes when forces are applied. To limit the movement, use \ref RigidBody::SetLinearFactor "SetLinearFactor()" and \ref RigidBody::SetAngularFactor "SetAngularFactor()" and set the axes you wish to use to 1 and those you do not wish to use to 0. For example moving humanoid characters are often represented by a capsule shape: to ensure they stay upright and only rotate when you explicitly set the rotation in code, set the angular factor to 0, 0, 0.
  925. To prevent tunneling of a fast moving rigid body through obstacles, continuous collision detection can be used. It approximates the object as a swept sphere, but has a performance cost, so it should be used only when necessary. Call \ref RigidBody::SetCcdRadius "SetCcdRadius()" and \ref RigidBody::SetCcdMotionThreshold "SetCcdMotionThreshold()" with non-zero values to enable. To prevent false collisions, the body's actual collision shape should completely contain the radius. The motion threshold is the required motion per simulation step for CCD to kick in: for example a box with size 1 should have motion threshold 1 as well.
  926. All physics calculations are performed in world space. Nodes containing a RigidBody component should preferably be parented to the Scene (root node) to ensure independent motion. For ragdolls this is not absolute, as retaining proper bone hierarchy is more important, but be aware that the ragdoll bones may drift far from the animated model's root scene node.
  927. \section Physics_ConstraintParameters Constraint parameters
  928. %Constraint position (and rotation if relevant) need to be defined in relation to both connected bodies, see \ref Constraint::SetPosition "SetPosition()" and \ref Constraint::SetOtherPosition "SetOtherPosition()". If the constraint connects a body to the static world, then the "other body position" and "other body rotation" mean the static end's transform in world space. There is also a helper function \ref Constraint::SetWorldPosition "SetWorldPosition()" to assign the constraint to a world-space position; this sets both relative positions.
  929. Specifying the constraint's motion axis instead of rotation is provided as an alternative as it can be more intuitive, see \ref Constraint::SetAxis "SetAxis()". However, by explicitly specifying a rotation you can be sure the constraint is oriented precisely as you want.
  930. Hinge, slider and cone twist constraints support defining limits for the motion. To be generic, these are encoded slightly unintuitively into Vector2's. For a hinge constraint, the low and high limit X coordinates define the minimum and maximum angle in degrees. For example -45 to 45. For a slider constraint, the X coordinates define the maximum linear motion in world space units, and the Y coordinates define maximum angular motion in degrees. The cone twist constraint uses only the high limit to define the maximum angles (minimum angle is always -maximum) in the following manner: The X coordinate is the limit of the twist (main) axis, while Y is the limit of the swinging motion about the other axes.
  931. \section Physics_Events Physics events
  932. The physics world sends 8 types of events during its update step:
  933. - E_PHYSICSPRESTEP before the simulation is stepped.
  934. - E_PHYSICSCOLLISIONSTART for each new collision during the simulation step. The participating scene nodes will also send E_NODECOLLISIONSTART events.
  935. - E_PHYSICSCOLLISION for each ongoing collision during the simulation step. The participating scene nodes will also send E_NODECOLLISION events.
  936. - E_PHYSICSCOLLISIONEND for each collision which has ceased. The participating scene nodes will also send E_NODECOLLISIONEND events.
  937. - E_PHYSICSPOSTSTEP after the simulation has been stepped.
  938. Note that if the rendering framerate is high, the physics might not be stepped at all on each frame: in that case those events will not be sent.
  939. \section Physics_Collision Reading collision events
  940. A new or ongoing physics collision event will report the collided scene nodes and rigid bodies, whether either of the bodies is a phantom, and the list of contact points.
  941. The contact points are encoded in a byte buffer, which can be read using the VectorBuffer or MemoryBuffer helper class. The following structure repeats for each contact:
  942. - World-space position (Vector3)
  943. - Normal vector (Vector3)
  944. - Distance, negative when interpenetrating (float)
  945. - Impulse applied in collision (float)
  946. An example of reading collision event and contact point data in script, from NinjaSnowWar game object collision handling code:
  947. \code
  948. void HandleNodeCollision(StringHash eventType, VariantMap& eventData)
  949. {
  950. Node@ otherNode = eventData["OtherNode"].GetNode();
  951. RigidBody@ otherBody = eventData["OtherBody"].GetRigidBody();
  952. VectorBuffer contacts = eventData["Contacts"].GetBuffer();
  953. while (!contacts.eof)
  954. {
  955. Vector3 contactPosition = contacts.ReadVector3();
  956. Vector3 contactNormal = contacts.ReadVector3();
  957. float contactDistance = contacts.ReadFloat();
  958. float contactImpulse = contacts.ReadFloat();
  959. // Do something with the contact data...
  960. }
  961. }
  962. \endcode
  963. \section Physics_Queries Physics queries
  964. The following queries into the physics world are provided:
  965. - Raycasts, see \ref PhysicsWorld::Raycast "Raycast()" and \ref PhysicsWorld::RaycastSingle "RaycastSingle()".
  966. - %Sphere cast (raycast with thickness), see \ref PhysicsWorld::SphereCast "SphereCast()".
  967. - %Sphere and box overlap tests, see \ref PhysicsWorld::GetRigidBodies() "GetRigidBodies()".
  968. - Which other rigid bodies are colliding with a body, see \ref RigidBody::GetCollidingBodies() "GetCollidingBodies()". In script this maps into the collidingBodies property.
  969. \page Navigation Navigation
  970. Urho3D implements navigation mesh generation and pathfinding by using the Recast & Detour libraries.
  971. The navigation functionality is exposed through the NavigationMesh and Navigable components.
972. NavigationMesh collects geometry from its child nodes that have been tagged with the Navigable component. By default the Navigable component behaves recursively: geometry from its child nodes will be collected too, unless the recursion is disabled. If possible, physics CollisionShape geometry is preferred, however only the triangle mesh, convex hull and box shapes are supported. If no suitable physics geometry is found from a node, static drawable geometry is used instead from StaticModel and TerrainPatch components if they exist. The LOD level used is the same as for occlusion and raycasts (see \ref StaticModel::SetOcclusionLodLevel "SetOcclusionLodLevel()").
  973. The easiest way to make the whole scene participate in navigation mesh generation is to create the %NavigationMesh and %Navigable components to the scene root node.
  974. The navigation mesh generation must be triggered manually by calling \ref NavigationMesh::Build "Build()". After the initial build, portions of the mesh can also be rebuilt by specifying a world bounding box for the volume to be rebuilt, but this can not expand the total bounding box size. Once the navigation mesh is built, it will be serialized and deserialized with the scene.
  975. To query for a path between start and end points on the navigation mesh, call \ref NavigationMesh::FindPath "FindPath()".
  976. For a demonstration of the navigation capabilities, check the related sample application (Bin/Data/Scripts/15_Navigation.as), which features partial navigation mesh rebuilds (objects can be created and deleted) and querying paths.
  977. \page UI User interface
  978. Urho3D implements a simple, hierarchical user interface system based on rectangular elements. The elements provided are:
  979. - BorderImage: a texture image with an optional border
  980. - Button: a pushbutton
  981. - CheckBox: a button that can be toggled on/off
  982. - Cursor: a mouse cursor
  983. - DropDownList: shows a vertical list of items (optionally scrollable) as a popup
  984. - LineEdit: a single-line text editor
  985. - ListView: shows a scrollable vertical list of items
  986. - Menu: a button which can show a popup element
  987. - ScrollBar: a slider with back and forward buttons
  988. - ScrollView: a scrollable view of child elements
  989. - Slider: a horizontal or vertical slider bar
  990. - Sprite: a texture image which supports subpixel positioning, scaling and rotating.
  991. - Text: static text that can be multiline
  992. - UIElement: container for other elements, renders nothing by itself
  993. - View3D: a window that renders a 3D viewport
  994. - Window: a movable and resizable window
  995. The root %UI element can be queried from the UI subsystem. It is an empty canvas (UIElement) as large as the application window, into which other elements can be added.
  996. Elements are added into each other similarly as scene nodes, using the \ref UIElement::AddChild "AddChild()" and \ref UIElement::RemoveChild "RemoveChild()" functions. Each %UI element has also a \ref UIElement::GetVars "user variables" VariantMap for storing custom data.
997. To allow the elements to react to mouse input, either a mouse cursor element must be defined using \ref UI::SetCursor "SetCursor()" or the operating system mouse cursor must be set visible from the Input subsystem.
  998. \section UI_Defining Defining UI elements in XML
  999. User interface elements derive from Serializable, so they can be serialized to/from XML using their attributes. There are two distinct use cases for %UI definition files: either defining just the %UI element style and leaving the actual position and dimensions to be filled in later, or fully defining an %UI element layout. The default element style definitions, used for example by the editor and the debug console, are in the file Bin/Data/UI/DefaultStyle.xml.
  1000. The function \ref UI::LoadLayout "LoadLayout()" in UI will take an XML file and instantiate the elements defined in it. To be valid XML, there should be one root-level %UI element. An optional style XML file can be specified; the idea is to first read the element's style from that file, then fill in the rest from the actual layout XML file. This way the layout file can be relatively simple, as the majority of the data is already defined.
1001. Note that a style can not be easily applied recursively to the loaded elements afterward. Therefore remember to specify the style file already when loading, or alternatively \ref UIElement::SetDefaultStyle "assign a default style file" to the %UI root element, which will then be picked up by all loaded layouts. This works because the %UI subsystem searches the style file by going up the parental chain starting from target parent %UI element. The search stops immediately when a style file is found or when it has reached the root element. Also note that Urho3D does not limit the number of style files being used at the same time in an application. You may have different style files set along the %UI parental hierarchy, if your application needs that.
  1002. See the elements' C++ code for all supported attributes, and look at the editor's user interface layouts in the Bin/Data/UI directory for examples. You can also use the Editor application to create %UI layouts. The serialization format is similar to scene XML serialization, with three important differences:
  1003. 1) The element type to instantiate, and the style to use for it can be set separately. For example the following element definition
  1004. \code
  1005. <element type="Button" style="CloseButton" />
  1006. \endcode
  1007. tells to instantiate a Button element, and that it should use the style "CloseButton" defined in the style XML file.
  1008. 2) Internal child elements, for example the scroll bars of a ScrollView, need to be marked as such to avoid instantiating them as duplicates. This is done by adding the attribute internal="true" to the XML element, and is required in both layout and style XML files. Furthermore, the elements must be listed in the order they have been added as children of the parent element (if in doubt, see the element's C++ constructor code. Omitting elements in the middle is OK.) For example:
  1009. \code
1010. <element type="ScrollView">
1011. <element type="ScrollBar" internal="true">
1012. ...customize the horizontal scroll bar attributes here...
1013. </element>
1014. <element type="ScrollBar" internal="true">
1015. ...customize the vertical scroll bar attributes here...
1016. </element>
1017. </element>
  1018. \endcode
  1019. 3) The popup element shown by Menu and DropDownList is not an actual child element. In XML serialization, it is nevertheless stored as a child element, but is marked with the attribute popup="true".
  1020. \section UI_Layouts UI element layout
  1021. By default %UI elements operate in a "free" layout mode, where child elements' positions can be specified relative to any of the parent element corners, but they are not automatically positioned or resized.
  1022. To create automatically adjusting layouts, the layout mode can be switched to either "horizontal" or "vertical". Now the child elements will be positioned left to right or top to bottom, based on the order in which they were added. They will be preferably resized to fit the parent element, taking into account their minimum and maximum sizes, but failing to do that, the parent element will be resized.
  1023. Left, top, right & bottom border widths and spacing between elements can also be specified for the layout. A grid layout is not directly supported, but it can be manually created with a horizontal layout inside a vertical layout, or vice versa.
  1024. \section UI_Sprites Sprites
  1025. Sprites are a special kind of %UI element that allow subpixel (float) positioning and scaling, as well as rotation, while the other elements use integer positioning for pixel-perfect display. Sprites can be used to implement rotating HUD elements such as minimaps or speedometer needles.
  1026. Due to the free transformability, sprites can not be reliably queried with \ref UI::GetElementAt "GetElementAt()". Also, only other sprites should be parented to sprites, as the other elements do not support scaling and rotation.
  1027. \page Serialization Serialization
  1028. Classes that derive from Serializable can perform automatic serialization to binary or XML format by defining \ref AttributeInfo "attributes". Attributes are stored to the Context per class. %Scene load/save and network replication are both implemented by having the Node and Component classes derive from Serializable.
  1029. The supported attribute types are all those supported by Variant. Attributes can either define a direct memory offset into the object, or setter & getter functions. Zero-based enumerations are also supported, so that the enum values can be stored as text into XML files instead of just numbers. For editing, the attributes also have human-readable names.
  1030. To implement side effects to attributes, for example that a Node needs to dirty its world transform whenever the local transform changes, the default attribute access functions in Serializable can be overridden. See \ref Serializable::OnSetAttribute "OnSetAttribute()" and \ref Serializable::OnGetAttribute "OnGetAttribute()".
  1031. Each attribute can have a combination of the following flags:
  1032. - AM_FILE: Is used for file serialization (load/save.)
  1033. - AM_NET: Is used for network replication.
  1034. - AM_LATESTDATA: Frequently changing data for network replication, where only the latest values matter. Used for motion and animation.
  1035. - AM_NOEDIT: Is an internal attribute and is not to be shown for editing.
  1036. - AM_NODEID: Is a node ID and may need rewriting when instantiating scene content.
  1037. - AM_COMPONENTID: Is a component ID and may need rewriting when instantiating scene content.
1038. The default flags are AM_FILE and AM_NET. Note that it is legal to define neither AM_FILE nor AM_NET, meaning the attribute has only run-time significance (perhaps for editing.)
  1039. \page Network Networking
  1040. The Network subsystem provides reliable and unreliable UDP messaging using kNet. A server can be created that listens for incoming connections, and client connections can be made to the server. After connecting, code running on the server can assign the client into a scene to enable scene replication, provided that when connecting, the client specified a blank scene for receiving the updates.
  1041. %Scene replication is one-directional: the server always has authority and sends scene updates to the client at a fixed update rate, by default 30 FPS. The client responds by sending controls updates (buttons, yaw and pitch + possible extra data) also at a fixed rate.
  1042. Bidirectional communication between the server and the client can happen either using raw network messages, which are binary-serialized data, or remote events, which operate like ordinary events, but are processed on the receiving end only. Code on the server can send messages or remote events either to one client, all clients assigned into a particular scene, or to all connected clients. In contrast the client can only send messages or remote events to the server, not directly to other clients.
  1043. Note that if a particular networked application does not need scene replication, network messages and remote events can also be transmitted without assigning the client to a scene. The Chat example does just that: it does not create a scene either on the server or the client.
  1044. \section Network_Connecting Connecting to a server
  1045. Starting the server and connecting to it both happen through the Network subsystem. See \ref Network::StartServer "StartServer()" and \ref Network::Connect "Connect()". A UDP port must be chosen; the examples use the port 1234.
  1046. Note the scene (to be used for replication) and identity VariantMap supplied as parameters when connecting. The identity data can contain for example the user name or credentials, it is completely application-specified. The identity data is sent right after connecting and causes the E_CLIENTIDENTITY event to be sent on the server when received. By subscribing to this event, server code can examine incoming connections and accept or deny them. The default is to accept all connections.
  1047. After connecting successfully, client code can get the Connection object representing the server connection, see \ref Network::GetServerConnection "GetServerConnection()". Likewise, on the server a Connection object will be created for each connected client, and these can be iterated through. This object is used to send network messages or remote events to the remote peer, to assign the client into a scene (on the server only), or to disconnect.
  1048. \section Network_Replication Scene replication
1049. %Network replication of scene content has been implemented in a straightforward manner, using \ref Serialization "attributes". Nodes and components that have not been created in local mode - see the CreateMode parameter of \ref Node::CreateChild "CreateChild()" or \ref Node::CreateComponent "CreateComponent()" - will be automatically replicated. Note that a replicated component created into a local node will not be replicated, as the node's locality is checked first.
  1050. The CreateMode translates into two different node and component ID ranges - replicated ID's range from 0x1 to 0xffffff, while local ID's range from 0x1000000 to 0xffffffff. This means there is a maximum of 16777215 replicated nodes or components in a scene.
  1051. If the scene was originally loaded from a file on the server, the client will also load the scene from the same file first. In this case all predefined, static objects such as the world geometry should be defined as local nodes, so that they are not needlessly retransmitted through the network during the initial update, and do not exhaust the more limited replicated ID range.
  1052. The server can be made to transmit needed resource \ref PackageFile "packages" to the client. This requires attaching the package files to the Scene by calling \ref Scene::AddRequiredPackageFile "AddRequiredPackageFile()". On the client, a cache directory for the packages must be chosen before receiving them is possible: see \ref Network::SetPackageCacheDir "SetPackageCacheDir()".
  1053. There are some things to watch out for:
  1054. - After connecting to a server, the client should not create, update or remove non-local nodes or components on its own. However, to create client-side special effects and such, the client can freely manipulate local nodes.
  1055. - A node's \ref Node::GetVars "user variables" VariantMap will be automatically replicated on a per-variable basis. This can be useful in transmitting data shared by several components, for example the player's score or health.
  1056. - To implement interpolation, exponential smoothing of the nodes' rendering transforms is enabled on the client. It can be controlled by two properties of the Scene, the smoothing constant and the snap threshold. Snap threshold is the distance between network updates which, if exceeded, causes the node to immediately snap to the end position, instead of moving smoothly. See \ref Scene::SetSmoothingConstant "SetSmoothingConstant()" and \ref Scene::SetSnapThreshold "SetSnapThreshold()".
  1057. - Position and rotation are Node attributes, while linear and angular velocities are RigidBody attributes. To cut down on the needed network bandwidth the physics components can be created as local on the server: in this case the client will not see them at all, and will only interpolate motion based on the node's transform changes. Replicating the actual physics components allows the client to extrapolate using its own physics simulation, and to also perform collision detection, though always non-authoritatively.
  1058. - By default the physics simulation also performs interpolation to enable smooth motion when the rendering framerate is higher than the physics FPS. This should be disabled on the server scene to ensure that the clients do not receive interpolated and therefore possibly non-physical positions and rotations. See \ref PhysicsWorld::SetInterpolation "SetInterpolation()".
  1059. - AnimatedModel does not replicate animation by itself. Rather, AnimationController will replicate its command state (such as "fade this animation in, play that animation at 1.5x speed.") To turn off animation replication, create the AnimationController as local. To ensure that also the first animation update will be received correctly, always create the AnimatedModel component first, then the AnimationController.
1060. - Networked attributes can either be in delta update or latest data mode. Delta updates are small incremental changes and must be applied in order, which may cause increased latency if there is a stall in network message delivery e.g. due to packet loss. High volume data such as position, rotation and velocities are transmitted as latest data, which does not need ordering, instead this mode simply discards any old data received out of order. Note that node and component creation (when initial attributes need to be sent) and removal can also be considered as delta updates and are therefore applied in order.
  1061. - To avoid going through the whole scene when sending network updates, nodes and components explicitly mark themselves for update when necessary. When writing your own replicated C++ components, call \ref Component::MarkNetworkUpdate "MarkNetworkUpdate()" in member functions that modify any networked attribute.
  1062. - The server update logic orders replication messages so that parent nodes are created and updated before their children. Remote events are queued and only sent after the replication update to ensure that if they originate from a newly created node, it will already exist on the receiving end. However, it is also possible to specify unordered transmission for a remote event, in which case that guarantee does not hold.
  1063. - Nodes have the concept of the \ref Node::SetOwner "owner connection" (for example the player that is controlling a specific game object), which can be set in server code. This property is not replicated to the client. Messages or remote events can be used instead to tell the players what object they control.
  1064. - At least for now, there is no built-in client-side prediction.
  1065. \section Network_InterestManagement Interest management
  1066. %Scene replication includes a simple, distance-based interest management mechanism for reducing bandwidth use. To use, create the NetworkPriority component to a Node you wish to apply interest management to. The component can be created as local, as it is not important to the clients.
  1067. This component has three parameters for controlling the update frequency: \ref NetworkPriority::SetBasePriority "base priority", \ref NetworkPriority::SetDistanceFactor "distance factor", and \ref NetworkPriority::SetMinPriority "minimum priority".
  1068. A current priority value is calculated on each server update as "base priority - distance factor * distance." Additionally, it can never go lower than the minimum priority. This value is then added to an update accumulator. Whenever the update accumulator reaches 100.0, the attribute changes to the node and its components are sent, and the accumulator is reset.
  1069. The default values are base priority 100.0, distance factor 0.0, and minimum priority 0.0. This means that by default an update is always sent (which is also the case if the node has no NetworkPriority component.) Additionally, there is a rule that the node's owner connection always receives updates at full frequency. This rule can be controlled by calling \ref NetworkPriority::SetAlwaysUpdateOwner "SetAlwaysUpdateOwner()".
  1070. Calculating the distance requires the client to tell its current observer position (typically, either the camera's or the player character's world position.) This is accomplished by the client code calling \ref Connection::SetPosition "SetPosition()" on the server connection.
  1071. For now, creation and removal of nodes is always sent immediately, without consulting interest management. This is based on the assumption that nodes' motion updates consume the most bandwidth.
  1072. \section Network_Controls Client controls update
  1073. The Controls structure is used to send controls information from the client to the server, by default also at 30 FPS. This includes held down buttons, which is an application-defined 32-bit bitfield, floating point yaw and pitch, and possible extra data (for example the currently selected weapon) stored within a VariantMap.
1074. It is up to the client code to ensure they are kept up-to-date, by calling \ref Connection::SetControls "SetControls()" on the server connection. The event E_NETWORKUPDATE will be sent to remind of the impending update, and the event E_NETWORKUPDATESENT will be sent after the update. The controls can then be inspected on the server side by calling \ref Connection::GetControls "GetControls()".
  1075. The controls update message also includes the client's observer position for interest management.
  1076. \section Network_Messages Raw network messages
  1077. All network messages have an integer ID. The first ID you can use for custom messages is 22 (lower ID's are either reserved for kNet's or the %Network subsystem's internal use.) Messages can be sent either unreliably or reliably, in-order or unordered. The data payload is simply raw binary data that can be crafted by using for example VectorBuffer.
  1078. To send a message to a Connection, use its \ref Connection::SendMessage "SendMessage()" function. On the server, messages can also be broadcast to all client connections by calling the \ref Network::BroadcastMessage "BroadcastMessage()" function.
  1079. When a message is received, and it is not an internal protocol message, it will be forwarded as the E_NETWORKMESSAGE event. See the Chat example for details of sending and receiving.
  1080. For high performance, consider using unordered messages, because for in-order messages there is only a single channel within the connection, and all previous in-order messages must arrive first before a new one can be processed.
  1081. \section Network_RemoteEvents Remote events
1082. A remote event consists of its event type (name hash), a flag that tells whether it is to be sent in-order or unordered, and the event data VariantMap. It can optionally be set to originate from a specific Node in the receiver's scene ("remote node event.")
  1083. To send a remote event to a Connection, use its \ref Connection::SendRemoteEvent "SendRemoteEvent()" function. To broadcast remote events to several connections at once (server only), use Network's \ref Network::BroadcastRemoteEvent "BroadcastRemoteEvent()" function.
  1084. For safety, allowed remote event types should be registered so that a client can not for example trigger an internal render update event on the server. See \ref Network::RegisterRemoteEvent "RegisterRemoteEvent()". Similarly to file paths, as long as no remote event types are registered, all are allowed.
  1085. Like with ordinary events, in script event types are strings instead of name hashes for convenience.
  1086. Remote events will always have the originating connection as a parameter in the event data. Here is how to get it in both C++ and script (in C++, include NetworkEvents.h):
  1087. C++:
  1088. \code
  1089. using namespace RemoteEventData;
  1090. Connection* remoteSender = static_cast<Connection*>(eventData[P_CONNECTION].GetPtr());
  1091. \endcode
  1092. %Script:
  1093. \code
  1094. Connection@ remoteSender = eventData["Connection"].GetConnection();
  1095. \endcode
  1096. \section Network_HttpRequests HTTP requests
1097. In addition to UDP messaging, the network subsystem allows making HTTP requests. Use the \ref Network::MakeHttpRequest "MakeHttpRequest()" function for this. You can specify the URL, the verb to use (default GET if empty), optional headers and optional post data. The HttpRequest object that is returned acts like a Deserializer, and you can read the response data in suitably sized chunks. After the whole response is read, the connection closes. The connection can also be closed early by allowing the request object to expire.
  1098. \page Multithreading Multithreading
  1099. Urho3D uses a task-based multithreading model. The WorkQueue subsystem can be supplied with tasks described by the WorkItem structure, by calling \ref WorkQueue::AddWorkItem "AddWorkItem()". These will be executed in background worker threads. The function \ref WorkQueue::Complete "Complete()" will complete all currently pending tasks, and execute them also in the main thread to make them finish faster.
  1100. On single-core systems no worker threads will be created, and tasks are immediately processed by the main thread instead. In the presence of more cores, a worker thread will be created for each hardware core except one which is reserved for the main thread. Hyperthreaded cores are not included, as creating worker threads also for them leads to unpredictable extra synchronization overhead.
  1101. The work items include a function pointer to call, with the signature
  1102. \verbatim
  1103. void WorkFunction(const WorkItem* item, unsigned threadIndex)
  1104. \endverbatim
  1105. The thread index ranges from 0 to n, where 0 represents the main thread and n is the number of worker threads created. Its function is to aid in splitting work into per-thread data structures that need no locking. The work item also contains three void pointers: start, end and aux, which can be used to describe a range of sub-work items, and an auxiliary data structure, which may for example be the object that originally queued the work.
  1106. Multithreading is so far not exposed to scripts, and is currently used only in a limited manner: to speed up the preparation of rendering views, including lit object and shadow caster queries, occlusion tests and particle system, animation and skinning updates. Raycasts into the Octree are also threaded, but physics raycasts are not.
  1107. Note that as the Profiler currently manages only a single hierarchy tree, profiling blocks may only appear in main thread code, not in the work functions.
  1108. \page Tools Tools
  1109. \section Tools_AssetImporter AssetImporter
  1110. Loads various 3D formats supported by Open Asset Import Library (http://assimp.sourceforge.net/) and saves Urho3D model, animation, material and scene files out of them. For the list of supported formats, look at http://assimp.sourceforge.net/main_features_formats.html.
  1111. Usage:
  1112. \verbatim
  1113. AssetImporter <command> <input file> <output file> [options]
  1114. Commands:
  1115. model Output a model
  1116. scene Output a scene
  1117. node Output a node and its children (prefab)
  1118. dump Dump scene node structure. No output file is generated
  1119. lod Combine several Urho3D models as LOD levels of the output model
1120. Syntax: lod <dist0> <mdl0> <dist1> <mdl1> ... <output file>
  1121. Options:
  1122. -b Save scene in binary format, default format is XML
  1123. -h Generate hard instead of smooth normals if input file has no normals
  1124. -i Use local ID's for scene nodes
  1125. -l Output a material list file for models
  1126. -na Do not output animations
  1127. -nm Do not output materials
  1128. -nt Do not output material textures
  1129. -nc Do not use material diffuse color value, instead output white
1130. -nh Do not save full node hierarchy (scene mode only)
  1131. -ns Do not create subdirectories for resources
  1132. -nz Do not create a zone and a directional light (scene mode only)
  1133. -nf Do not fix infacing normals
  1134. -p <path> Set path for scene resources. Default is output file path
1135. -r <name> Use the named scene node as root node
  1136. -f <freq> Animation tick frequency to use if unspecified. Default 4800
  1137. -o Optimize redundant submeshes. Loses scene hierarchy and animations
  1138. -s <filter> Include non-skinning bones in the model's skeleton. Can be given a
  1139. case-insensitive semicolon separated filter list. Bone is included
  1140. if its name contains any of the filters. Prefix filter with minus
  1141. sign to use as an exclude. For example -s "Bip01;-Dummy;-Helper"
  1142. -t Generate tangents
  1143. -v Enable verbose Assimp library logging
  1144. -eao Interpret material emissive texture as ambient occlusion
  1145. -cm Check and do not overwrite if material exists
  1146. -ct Check and do not overwrite if texture exists
  1147. -ctn Check and do not overwrite if texture has newer timestamp
  1148. \endverbatim
  1149. The material list is a text file, one material per line, saved alongside the Urho3D model. It is used by the scene editor to automatically apply the imported default materials when setting a new model for a StaticModel, StaticModelGroup, AnimatedModel or Skybox component, and can also be manually invoked by calling \ref StaticModel::ApplyMaterialList "ApplyMaterialList()". The list files can safely be deleted if not needed.
  1150. In model or scene mode, the AssetImporter utility will also automatically save non-skeletal node animations into the output file directory.
  1151. \section Tools_OgreImporter OgreImporter
1152. Loads OGRE .mesh.xml and .skeleton.xml files and saves them as Urho3D .mdl (model) and .ani (animation) files. For other 3D formats and whole scene importing, see AssetImporter instead. However, that tool does not handle the OGRE formats as completely as this one.
  1153. Usage:
  1154. \verbatim
  1155. OgreImporter <input file> <output file> [options]
  1156. Options:
  1157. -l Output a material list file
  1158. -na Do not output animations
  1159. -nm Do not output morphs
  1160. -r Output only rotations from animations
  1161. -s Split each submesh into own vertex buffer
  1162. -t Generate tangents
  1163. \endverbatim
1164. Note: outputting only bone rotations may help when using an animation in a different model, but if bone position changes have been used for effect, the animation may become less lively. Unpredictable mutilations might result from using an animation in a model it was not originally intended for, as Urho3D does not specifically attempt to retarget animations.
  1165. \section Tools_PackageTool PackageTool
  1166. Examines a directory recursively for files and subdirectories and creates a PackageFile. The package file can be added to the ResourceCache and used as if the files were on a (read-only) filesystem. The file data can optionally be compressed using the LZ4 compression library.
  1167. Usage:
  1168. \verbatim
  1169. PackageTool <directory to process> <package name> [basepath] [options]
  1170. Options:
  1171. -c Enable package file LZ4 compression
  1172. \endverbatim
  1173. When PackageTool runs, it will go inside the source directory, then look for subdirectories and any files. Paths inside the package will by default be relative to the source directory, but if an extra path prefix is desired, it can be specified by the optional basepath argument.
  1174. For example, this would convert all the resource files inside the Urho3D Data directory into a package called Data.pak (execute the command from the Bin directory)
  1175. \verbatim
  1176. PackageTool Data Data.pak
  1177. \endverbatim
  1178. The -c option enables LZ4 compression on the files.
  1179. \section Tools_RampGenerator RampGenerator
  1180. Creates 1D and 2D ramp textures for use in light attenuation and spotlight spot shapes.
  1181. Usage:
  1182. \verbatim
  1183. RampGenerator <output file> <width> <power> [dimensions]
  1184. \endverbatim
  1185. The output is saved in PNG format. The power parameter is fed into the pow() function to determine ramp shape; higher value gives more brightness and more abrupt fade at the edge.
  1186. \section Tools_ScriptCompiler ScriptCompiler
  1187. Compiles AngelScript file(s) to binary bytecode for faster loading. Can also dump the %Script API in Doxygen format.
  1188. Usage:
  1189. \verbatim
  1190. ScriptCompiler <input file> [resource path for includes]
  1191. ScriptCompiler -dumpapi <Doxygen output file> [C header output file]
  1192. \endverbatim
  1193. The output files are saved with the extension .asc (compiled AngelScript.) Binary files are not automatically loaded instead of the text format (.as) script files, instead resource requests and resource references in objects need to point to the compiled files. In a final build of an application it may be convenient to simply replace the text format script files with the compiled scripts.
1194. The script API dump mode can be used to replace the 'ScriptAPI.dox' file in the 'Docs' directory. If the output file name is not provided then the script API will be dumped to standard output (console) instead.
  1195. \section Tools_ShaderCompiler ShaderCompiler
  1196. Compiles HLSL shaders using an XML definition file that describes the shader permutations, and their associated HLSL preprocessor defines.
  1197. The output consists of shader bytecode for each permutation, as well as information of the constant parameters and texture units used. See \ref FileFormats_Shader "Binary shader format" for details.
  1198. Usage:
  1199. \verbatim
  1200. ShaderCompiler <definitionfile> <outputpath> [options]
  1201. Options:
  1202. -t <VS|PS> Compile only vertex or pixel shaders, by default compile both
  1203. -v <name> Compile only the shader variation with name
  1204. -d <define> Add a define. Add SM3 to compile for Shader Model 3
  1205. If output path is not specified, shader binaries will be output into the same
  1206. directory as the definition file. Specify a wildcard to compile multiple
  1207. shaders.
  1208. \endverbatim
  1209. The D3DX library from the DirectX runtime or SDK needs to be installed. Note that when running in Direct3D9 mode, the engine will automatically invoke ShaderCompiler if it can not find a shader in binary form. Depending on shader complexity this can take a substantial amount of time. To avoid this, execute the file CompileAllShaders.bat in the Bin directory to precompile all shader permutations. It is also highly recommended to not ship ShaderCompiler with Urho3D applications, but to instead precompile all shaders. After precompiling you can delete all .hlsl files from the CoreData directory of the shipping build.
  1210. Note that GLSL shaders in the Bin/CoreData/Shaders/GLSL directory also use similar XML definition files, but no precompiling tool exists for them; they are always compiled during runtime.
  1211. \page Unicode Unicode support
  1212. The String class supports UTF-8 encoding. However, by default strings are treated as a sequence of bytes without regard to the encoding. There is a separate
  1213. API for operating on Unicode characters, see for example \ref String::LengthUTF8 "LengthUTF8()", \ref String::AtUTF8 "AtUTF8()" and \ref String::SubstringUTF8 "SubstringUTF8()". Urho3D itself needs to be aware of the Unicode characters only in the \ref UI "user interface", when displaying text and manipulating it through user input.
  1214. On Windows, wide char strings are used in all calls to the operating system, such as accessing the command line, files, and the window title. The WString class is used as a helper for conversion. On Linux & Mac OS X 8-bit strings are used directly and they are assumed to contain UTF-8.
  1215. Note that \ref FileSystem::ScanDir "ScanDir()" function may return filenames in unnormalized Unicode on Mac OS X. Unicode re-normalization is not yet implemented.
  1216. \page FileFormats Custom file formats
  1217. Urho3D tries to use existing file formats whenever possible, and define custom file formats only when absolutely necessary. Currently used custom file formats are:
  1218. \section FileFormats_Model Binary model format (.mdl)
  1219. \verbatim
  1220. Model geometry and vertex morph data
  1221. byte[4] Identifier "UMDL"
  1222. uint Number of vertex buffers
  1223. For each vertex buffer:
  1224. uint Vertex count
  1225. uint Vertex element mask (determines vertex size)
  1226. uint Morphable vertex range start index
  1227. uint Morphable vertex count
  1228. byte[] Vertex data (vertex count * vertex size)
  1229. uint Number of index buffers
  1230. For each index buffer:
  1231. uint Index count
  1232. uint Index size (2 for 16-bit indices, 4 for 32-bit indices)
  1233. byte[] Index data (index count * index size)
  1234. uint Number of geometries
  1235. For each geometry:
  1236. uint Number of bone mapping entries
  1237. uint[] Bone mapping data, Maps geometry bone indices to global bone indices for HW skinning.
  1238. May be empty, in this case identity mapping will be used.
  1239. uint Number of LOD levels
  1240. For each LOD level:
  1241. float LOD distance
  1242. uint Primitive type (0 = triangle list, 1 = line list)
  1243. uint Vertex buffer index, starting from 0
  1244. uint Index buffer index, starting from 0
  1245. uint Draw range: index start
  1246. uint Draw range: index count
  1247. uint Number of vertex morphs (may be 0)
  1248. For each vertex morph:
  1249. cstring Name of morph
  1250. uint Number of affected vertex buffers
  1251. For each affected vertex buffer:
  1252. uint Vertex buffer index, starting from 0
  1253. uint Vertex element mask for morph data. Only positions, normals & tangents are supported.
  1254. uint Vertex count
  1255. For each vertex:
  1256. uint Vertex index
  1257. Vector3 Position (if included in the mask)
  1258. Vector3 Normal (if included in the mask)
  1259. Vector3 Tangent (if included in the mask)
  1260. Skeleton data
  1261. uint Number of bones (may be 0)
  1262. For each bone:
  1263. cstring Bone name
  1264. uint Parent bone index starting from 0. Same as own bone index for the root bone
  1265. Vector3 Initial position
  1266. Quaternion Initial rotation
  1267. Vector3 Initial scale
  1268. float[12] 4x3 offset matrix for skinning
  1269. byte Bone collision info bitmask. 1 = bounding sphere 2 = bounding box
  1270. If bounding sphere data included:
  1271. float Bone radius
  1272. If bounding box data included:
  1273. Vector3 Bone bounding box minimum
  1274. Vector3 Bone bounding box maximum
  1275. Bounding box data
  1276. Vector3 Model bounding box minimum
  1277. Vector3 Model bounding box maximum
  1278. Geometry center data
  1279. For each geometry:
  1280. Vector3 Geometry center
  1281. \endverbatim
  1282. \section FileFormats_Animation Binary animation format (.ani)
  1283. \verbatim
  1284. byte[4] Identifier "UANI"
  1285. cstring Animation name
  1286. float Length in seconds
  1287. uint Number of tracks
  1288. For each track:
  1289. cstring Track name (practically same as the bone name that should be driven)
  1290. byte Mask of included animation data. 1 = bone positions 2 = bone rotations 4 = bone scaling
  1291. uint Number of keyframes
  1292. For each keyframe:
  1293. float Time position in seconds
  1294. Vector3 Position (if included in data)
  1295. Quaternion Rotation (if included in data)
  1296. Vector3 Scale (if included in data)
  1297. \endverbatim
  1298. Note: animations are stored using absolute bone transformations. Therefore only lerp-blending between animations is supported; additive pose modification is not.
  1299. \section FileFormats_Shader Direct3D9 binary shader format (.vs2, .ps2, .vs3, .ps3)
  1300. \verbatim
  1301. byte[4] Identifier "USHD"
  1302. short Shader type (0 = vertex, 1 = pixel)
  1303. short Shader model (2 or 3)
  1304. uint Number of constant parameters
  1305. For each constant parameter:
  1306. cstring Parameter name
  1307. byte Register index
  1308. byte Number of registers
  1309. uint Number of texture units
  1310. For each texture unit:
  1311. cstring Texture unit name
  1312. byte Sampler index
  1313. uint Bytecode size
  1314. byte[] Bytecode
  1315. \endverbatim
  1316. \section FileFormats_Package Package file (.pak)
  1317. \verbatim
  1318. byte[4] Identifier "UPAK" or "ULZ4" if compressed
  1319. uint Number of file entries
  1320. uint Whole package checksum
  1321. For each file entry:
  1322. cstring Name
  1323. uint Start offset
  1324. uint Size
  1325. uint Checksum
  1326. The compressed data for each file is the following, repeated until the file is done:
  1327. ushort Uncompressed length of block
  1328. ushort Compressed length of block
  1329. byte[] Compressed data
  1330. \endverbatim
  1331. \section FileFormats_Script Compiled AngelScript (.asc)
  1332. \verbatim
  1333. byte[4] Identifier "ASBC"
  1334. byte[] Bytecode, produced by AngelScript serializer
  1335. \endverbatim
  1336. \page CodingConventions Coding conventions
  1337. - Indent style is Allman (BSD) -like, ie. brace on the next line from a control statement, indented on the same level. In switch-case statements the cases are on the same indent level as the switch statement.
  1338. - Indents use 4 spaces instead of tabs.
  1339. - Class and struct names are in camelcase beginning with an uppercase letter. They should be nouns. For example DebugRenderer, FreeTypeLibrary, Graphics.
  1340. - Functions are likewise in upper-camelcase. For example CreateComponent, SetLinearRestThreshold.
  1341. - Variables are in lower-camelcase. Member variables have an underscore appended. For example numContacts, randomSeed_.
  1342. - Constants and enumerations are in uppercase. For example Vector3::ZERO or PASS_SHADOW.
  1343. - Pointers and references append the * or & symbol to the type without a space in between. For example Drawable* drawable, Serializer& dest.
  1344. - Class definitions proceed in the following order:
  1345. - public constructors and the destructor
  1346. - public virtual functions
  1347. - public non-virtual member functions
  1348. - public static functions
  1349. - public member variables
  1350. - public static variables
  1351. - repeat all of the above in order for protected definitions, and finally private
  1352. - Header files are commented using one-line comments beginning with /// to mark them for Doxygen.
  1353. - Inline functions are defined inside the class definitions where possible, without using the inline keyword.
  1354. */
  1355. }