Răsfoiți Sursa

Merge pull request #3545 from Calinou/fix-trailing-whitespace

Remove trailing whitespace and irregular whitespace
Rémi Verschelde 5 ani în urmă
părinte
comite
12225df1e0
78 a modificat fișierele cu 777 adăugiri și 790 ștergeri
  1. 1 1
      README.md
  2. 1 1
      _templates/layout.html
  3. 6 6
      community/contributing/best_practices_for_engine_contributors.rst
  4. 1 1
      development/compiling/compiling_for_osx.rst
  5. 2 2
      development/compiling/compiling_for_uwp.rst
  6. 2 2
      development/compiling/compiling_for_windows.rst
  7. 1 1
      development/editor/creating_icons.rst
  8. 4 4
      getting_started/editor/default_key_mapping.rst
  9. 2 2
      getting_started/editor/unity_to_godot.rst
  10. 1 1
      getting_started/scripting/c_sharp/c_sharp_features.rst
  11. 0 2
      getting_started/scripting/c_sharp/index.rst
  12. 2 2
      getting_started/scripting/creating_script_templates.rst
  13. 8 8
      getting_started/scripting/gdscript/gdscript_exports.rst
  14. 3 3
      getting_started/scripting/gdscript/static_typing.rst
  15. 11 11
      getting_started/step_by_step/exporting.rst
  16. 2 2
      getting_started/step_by_step/intro_to_the_editor_interface.rst
  17. 1 1
      getting_started/step_by_step/resources.rst
  18. 2 2
      getting_started/workflow/assets/escn_exporter/animation.rst
  19. 0 1
      getting_started/workflow/assets/escn_exporter/lights.rst
  20. 2 2
      getting_started/workflow/assets/escn_exporter/material.rst
  21. 1 1
      getting_started/workflow/assets/importing_images.rst
  22. 2 2
      getting_started/workflow/assets/importing_scenes.rst
  23. 1 1
      getting_started/workflow/best_practices/godot_interfaces.rst
  24. 1 1
      tutorials/2d/2d_meshes.rst
  25. 2 2
      tutorials/2d/2d_sprite_animation.rst
  26. 1 1
      tutorials/2d/canvas_layers.rst
  27. 11 11
      tutorials/3d/fps_tutorial/part_one.rst
  28. 2 3
      tutorials/3d/fps_tutorial/part_three.rst
  29. 2 2
      tutorials/3d/fps_tutorial/part_two.rst
  30. 1 1
      tutorials/3d/standard_material_3d.rst
  31. 1 1
      tutorials/3d/using_gridmaps.rst
  32. 54 54
      tutorials/3d/vertex_animation/animating_thousands_of_fish.rst
  33. 20 20
      tutorials/3d/vertex_animation/controlling_thousands_of_fish.rst
  34. 1 1
      tutorials/animation/cutout_animation.rst
  35. 2 2
      tutorials/animation/introduction_2d.rst
  36. 0 2
      tutorials/audio/index.rst
  37. 2 2
      tutorials/audio/recording_with_microphone.rst
  38. 0 1
      tutorials/content/index.rst
  39. 2 2
      tutorials/content/procedural_geometry/arraymesh.rst
  40. 26 26
      tutorials/content/procedural_geometry/index.rst
  41. 2 2
      tutorials/content/procedural_geometry/meshdatatool.rst
  42. 0 1
      tutorials/debug/index.rst
  43. 3 3
      tutorials/gui/bbcode_in_richtextlabel.rst
  44. 24 24
      tutorials/gui/gui_containers.rst
  45. 0 2
      tutorials/gui/index.rst
  46. 3 3
      tutorials/io/files/resource_queue.gd
  47. 1 1
      tutorials/math/beziers_and_curves.rst
  48. 1 1
      tutorials/misc/jitter_stutter.rst
  49. 11 12
      tutorials/misc/state_design_pattern.rst
  50. 15 15
      tutorials/networking/websocket.rst
  51. 5 5
      tutorials/optimization/using_servers.rst
  52. 2 4
      tutorials/physics/soft_body.rst
  53. 1 1
      tutorials/platform/consoles.rst
  54. 58 58
      tutorials/platform/customizing_html5_shell.rst
  55. 15 15
      tutorials/platform/html5_shell_classref.rst
  56. 13 13
      tutorials/plugins/android/android_plugin.rst
  57. 12 12
      tutorials/plugins/editor/making_plugins.rst
  58. 26 26
      tutorials/plugins/editor/spatial_gizmos.rst
  59. 8 8
      tutorials/plugins/editor/visual_shader_plugins.rst
  60. 1 1
      tutorials/plugins/gdnative/gdnative-cpp-example.rst
  61. 51 51
      tutorials/shading/advanced_postprocessing.rst
  62. 3 3
      tutorials/shading/godot_shader_language_style_guide.rst
  63. 4 4
      tutorials/shading/shader_materials.rst
  64. 18 18
      tutorials/shading/shading_reference/canvas_item_shader.rst
  65. 9 9
      tutorials/shading/shading_reference/particle_shader.rst
  66. 15 15
      tutorials/shading/shading_reference/shaders.rst
  67. 25 25
      tutorials/shading/shading_reference/shading_language.rst
  68. 16 16
      tutorials/shading/shading_reference/sky_shader.rst
  69. 12 12
      tutorials/shading/shading_reference/spatial_shader.rst
  70. 1 1
      tutorials/shading/your_first_shader/index.rst
  71. 20 20
      tutorials/shading/your_first_shader/what_are_shaders.rst
  72. 18 18
      tutorials/shading/your_first_shader/your_first_canvasitem_shader.rst
  73. 34 34
      tutorials/shading/your_first_shader/your_first_spatial_shader.rst
  74. 35 35
      tutorials/shading/your_first_shader/your_second_spatial_shader.rst
  75. 6 6
      tutorials/viewports/using_viewport_as_texture.rst
  76. 1 1
      tutorials/vr/developing_for_oculus_quest.rst
  77. 70 70
      tutorials/vr/vr_starter_tutorial/vr_starter_tutorial_part_one.rst
  78. 54 54
      tutorials/vr/vr_starter_tutorial/vr_starter_tutorial_part_two.rst

+ 1 - 1
README.md

@@ -69,7 +69,7 @@ Building the documentation requires at least 8 GB of RAM to be done without swap
 # On Linux/macOS
 make html SPHINXOPTS=-j2
 
-# On Windows
+# On Windows
 set SPHINXOPTS=-j2 && make html
 ```
 

+ 1 - 1
_templates/layout.html

@@ -7,7 +7,7 @@
   <link rel="alternate" hreflang="{{ alternate_lang_href }}" href="{{ godot_docs_basepath }}{{ alternate_lang }}/{{ godot_canonical_version }}/{{ pagename }}{{ godot_docs_suffix }}" />
   {% endfor -%}
   <link rel="alternate" hreflang="x-default" href="{{ godot_docs_basepath }}{{ godot_default_lang }}/{{ godot_canonical_version }}/{{ pagename }}{{ godot_docs_suffix }}" />
-  
+
   <link rel="canonical" href="{{ godot_docs_basepath }}{{ lang_attr }}/{{ godot_canonical_version }}/{{ pagename }}{{ godot_docs_suffix }}" />
   {% endif -%}
   {{ super() }}

+ 6 - 6
community/contributing/best_practices_for_engine_contributors.rst

@@ -38,7 +38,7 @@ This is a variation of the previous practice. I believe most developers agree th
 
 The answer to this question is that the problem needs to *exist*. It must not be speculation or a belief. The user must be using the software as intended to create something they *need*. In this process, the user may stumble into a problem that requires a solution in order to continue, or in order to achieve greater productivity. In this case, *a solution is needed*.
 
-Believing that problems may arise in the future and that the software needs to be ready to solve them by the time they appear is called *"Future proofing"* and its characterized by lines of thought such as: 
+Believing that problems may arise in the future and that the software needs to be ready to solve them by the time they appear is called *"Future proofing"* and its characterized by lines of thought such as:
 
 - I think it would be useful for users to...
 - I think users will eventually need to...
@@ -72,16 +72,16 @@ Because of this, user proposed solutions don't always contemplate other use case
 
 .. image:: img/best_practices4.png
 
-For developers, the perspective is different. They may find the user's problem too unique to justify a solution (instead of a user workaround), or maybe they will suggest a partial (usually simpler or lower level) solution that applies to a wider range of known problems, and leave the rest of the solution up to the user. 
+For developers, the perspective is different. They may find the user's problem too unique to justify a solution (instead of a user workaround), or maybe they will suggest a partial (usually simpler or lower level) solution that applies to a wider range of known problems, and leave the rest of the solution up to the user.
 
-In any case, before attempting a contribution, it is important to discuss the actual problems with the other developers or contributors, so a better agreement on implementation can be reached. 
+In any case, before attempting a contribution, it is important to discuss the actual problems with the other developers or contributors, so a better agreement on implementation can be reached.
 
 The only exception, in this case, is when an area of code has a clear owner (agreed by the other contributors), who talks to users directly and has the most knowledge to implement a solution directly.
 
 #5: To each problem, its own solution
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-For programmers, it is always a most enjoyable challenge to find the most optimal solutions to problems. Things, however, may go overboard sometimes and programmers will try to come up with solutions that solve as many problems as possible. 
+For programmers, it is always a most enjoyable challenge to find the most optimal solutions to problems. Things, however, may go overboard sometimes and programmers will try to come up with solutions that solve as many problems as possible.
 
 The situation will often take a turn for the worse when, in order to make this solution appear even more fantastic and flexible, the pure speculation-based problems (as described in #2) also make their appearance on stage.
 
@@ -98,7 +98,7 @@ Big and flexible solutions also have an additional drawback which is that, over
 
 This is a continuation of the previous point, which further explains why this way of thinking and designing software is preferred.
 
-As mentioned before (in point #2), it is very difficult for us (as human beings who design software) to actually understand all future user needs. Trying to write very flexible structures that cater to many use cases at once is often a mistake. 
+As mentioned before (in point #2), it is very difficult for us (as human beings who design software) to actually understand all future user needs. Trying to write very flexible structures that cater to many use cases at once is often a mistake.
 
 We may come up with something we believe is brilliant, but when it's actually used, we will find that users will never even use half of it, or that they will require features that don't quite accommodate our original design, forcing us to either throw it away or make it even more complex.
 
@@ -115,7 +115,7 @@ In real-life scenarios, these use cases will be at most rare and uncommon anyway
 
 When looking for a solution to a problem, be it implementing a new feature or fixing a bug, sometimes the easiest path is to add data or a new function in the core layers of code.
 
-The main problem here is, adding something to the core layers that will only be used from a single location far away will not only make the code more difficult to follow (split in two), but also make the core API larger, more complex, more difficult to understand in general. 
+The main problem here is, adding something to the core layers that will only be used from a single location far away will not only make the code more difficult to follow (split in two), but also make the core API larger, more complex, more difficult to understand in general.
 
 This is bad, because readability and cleanness of core APIs is always of extreme importance given how much code relies on it, and because it's key for new contributors as a starting point to learning the codebase.
 

+ 1 - 1
development/compiling/compiling_for_osx.rst

@@ -17,7 +17,7 @@ For compiling under macOS, the following is required:
 - *Optional* - `yasm <https://yasm.tortall.net/>`_ (for WebM SIMD optimizations).
 
 If you are building the ``master`` branch:
-- Download and install the `Vulkan SDK for macOS <https://vulkan.lunarg.com/sdk/home>`__. 
+- Download and install the `Vulkan SDK for macOS <https://vulkan.lunarg.com/sdk/home>`__.
 
 .. note:: If you have `Homebrew <https://brew.sh/>`_ installed, you can easily
           install SCons and yasm using the following command::

+ 2 - 2
development/compiling/compiling_for_uwp.rst

@@ -18,11 +18,11 @@ Requirements
 
 .. note:: The ANGLE repo by Microsoft has been discontinued and the
           ``ms_master`` branch has been cleared out.
-          
+
           As a temporary workaround however, it is still possible to
           download an older state of the source code via commit
           `c61d048 <https://github.com/microsoft/angle/tree/c61d0488abd9663e0d4d2450db7345baa2c0dfb6>`__.
-          
+
           This page will eventually be updated in the future to reflect
           the new build instructions.
 

+ 2 - 2
development/compiling/compiling_for_windows.rst

@@ -17,7 +17,7 @@ For compiling under Windows, the following is required:
 - `MinGW-w64 <http://mingw-w64.org/>`_ with GCC can be used as an alternative to
   Visual Studio. Be sure to install/configure it to use the ``posix`` thread model.
   **Important:** When using MinGW to compile the ``master`` branch, you need GCC 9 or later. Because
-  MinGW does not officially release GCC 9 yet, you can get an alternate installer from 
+  MinGW does not officially release GCC 9 yet, you can get an alternate installer from
   `here <https://jmeubank.github.io/tdm-gcc/articles/2020-03/9.2.0-release>`_.
 - `Python 3.5+ <https://www.python.org/downloads/windows/>`_.
 - `SCons 3.0 <https://www.scons.org/>`_ build system. If using Visual Studio 2019,
@@ -36,7 +36,7 @@ For compiling under Windows, the following is required:
                   mingw-w64-x86_64-gcc mingw-w64-x86_64-yasm \
                   mingw-w64-i686-python3-pip mingw-w64-i686-gcc \
                   mingw-w64-i686-yasm make
-              
+
           For each MSYS2 MinGW subsystem, you should then run
           `pip install scons` in its shell.
 

+ 1 - 1
development/editor/creating_icons.rst

@@ -62,7 +62,7 @@ optimized before being added to the engine, to do so:
 
        svgcleaner --multipass svg_source.svg svg_optimized.svg
 
-The ``--multipass`` switch improves compression, so make sure to include it.
+The ``--multipass`` switch improves compression, so make sure to include it.
 The optimized icon will be saved to ``svg_optimized.svg``. You can also change
 the destination parameter to any relative or absolute path you'd like.
 

+ 4 - 4
getting_started/editor/default_key_mapping.rst

@@ -3,18 +3,18 @@
 Default editor shortcuts
 ========================
 Many of Godot Editor functions can be executed with keyboard shortcuts. This page
-lists functions which have associated shortcuts by default, but many others are 
-available for customization in editor settings as well. To change keys associated 
+lists functions which have associated shortcuts by default, but many others are
+available for customization in editor settings as well. To change keys associated
 with these and other actions navigate to ``Editor -> Editor Settings -> Shortcuts``.
 
-While some actions are universal, a lot of shortcuts are specific to individual 
+While some actions are universal, a lot of shortcuts are specific to individual
 tools. For this reason it is possible for some key combinations to be assigned
 to more than one function. The correct action will be performed depending on the
 context.
 
 .. note:: While Windows and Linux builds of the editor share most of the default settings,
           some shortcuts may differ for macOS version. This is done for better integration
-          of the editor into macOS ecosystem. Users fluent with standard shortcuts on that 
+          of the editor into macOS ecosystem. Users fluent with standard shortcuts on that
           OS should find Godot Editor's default key mapping intuitive.
 
 General Editor Actions

+ 2 - 2
getting_started/editor/unity_to_godot.rst

@@ -219,7 +219,7 @@ Connections: groups and signals
 
 You can control nodes by accessing them via script and calling built-in
 or user-defined functions on them. You can also place nodes in a group
-and call functions on all nodes in this group. See more in the 
+and call functions on all nodes in this group. See more in the
 :ref:`scripting documentation <doc_scripting_continued>`.
 
 Nodes can send a signal when a specified action occurs. A signal can
@@ -246,7 +246,7 @@ Creating a script that inherits :ref:`class_Resource` will allow you to create c
 Using Godot in C++
 ------------------
 
-Godot allows you to develop your project directly in C++ by using its API, which is not possible with Unity at the moment. 
+Godot allows you to develop your project directly in C++ by using its API, which is not possible with Unity at the moment.
 As an example, you can consider Godot Engine's editor as a "game" written in C++ using the Godot API.
 
 If you are interested in using Godot in C++, you may want to start reading the :ref:`Developing in

+ 1 - 1
getting_started/scripting/c_sharp/c_sharp_features.rst

@@ -96,7 +96,7 @@ Declaring a signal in C# is done with the ``[Signal]`` attribute on a delegate.
     delegate void MySignalWithArguments(string foo, int bar);
 
 These signals can then be connected either in the editor or from code with ``Connect``.
-If you want to connect a signal in the editor, you need to (re)build the project assemblies to see the new signal. This build can be manually triggered by clicking the “Build” button at the top right corner of the editor window. 
+If you want to connect a signal in the editor, you need to (re)build the project assemblies to see the new signal. This build can be manually triggered by clicking the “Build” button at the top right corner of the editor window.
 
 .. code-block:: csharp
 

+ 0 - 2
getting_started/scripting/c_sharp/index.rst

@@ -9,5 +9,3 @@ C#
    c_sharp_features
    c_sharp_differences
    c_sharp_style_guide
-
-

+ 2 - 2
getting_started/scripting/creating_script_templates.rst

@@ -102,13 +102,13 @@ other templates.
         // Called when the node enters the scene tree for the first time.
         public override void _Ready()
         {
-            
+
         }
 
     //  // Called every frame. 'delta' is the elapsed time since the previous frame.
     //  public override void _Process(float delta)
     //  {
-    //      
+    //
     //  }
     }
 

+ 8 - 8
getting_started/scripting/gdscript/gdscript_exports.rst

@@ -111,17 +111,17 @@ Examples
     export(Color, RGB) var col
     # Color given as red-green-blue-alpha value.
     export(Color, RGBA) var col
-    
+
     # Nodes
-    
+
     # Another node in the scene can be exported as a NodePath.
     export(NodePath) var node_path
     # Do take note that the node itself isn't being exported -
     # there is one more step to call the true node:
     var node = get_node(node_path)
-    
+
     # Resources
-    
+
     export(Resource) var resource
     # In the Inspector, you can then drag and drop a resource file
     # from the FileSystem dock into the variable slot.
@@ -164,7 +164,7 @@ cause them to change in all other instances. Exported arrays can have
 initializers, but they must be constant expressions.
 
 If the exported array specifies a type which inherits from Resource, the array
-values can be set in the inspector by dragging and dropping multiple files 
+values can be set in the inspector by dragging and dropping multiple files
 from the FileSystem dock at once.
 
 ::
@@ -227,7 +227,7 @@ described in :ref:`doc_accessing_data_or_logic_from_object`.
 
 .. seealso:: For binding properties using the above methods in C++, see
              :ref:`doc_binding_properties_using_set_get_property_list`.
-             
+
 .. warning:: The script must operate in the ``tool`` mode so the above methods
              can work from within the editor.
 
@@ -250,7 +250,7 @@ example of a built-in category.
             }
         )
         return properties
-        
+
 * ``name`` is the name of a category to be added to the inspector;
 
 * ``PROPERTY_USAGE_CATEGORY`` indicates that the property should be treated as a
@@ -273,7 +273,7 @@ A list of properties with similar names can be grouped.
                 usage = PROPERTY_USAGE_GROUP | PROPERTY_USAGE_SCRIPT_VARIABLE
         })
         return properties
-        
+
 * ``name`` is the name of a group which is going to be displayed as collapsible
   list of properties;
 

+ 3 - 3
getting_started/scripting/gdscript/static_typing.rst

@@ -53,7 +53,7 @@ to issues at runtime, but lets you decide whether or not you want to
 leave the code as it is. More on that in a moment.
 
 Static types also give you better code completion options. Below, you
-can see the difference between a dynamic and a static typed completion
+can see the difference between a dynamic and a static typed completion
 options for a class called ``PlayerController``.
 
 You've probably stored a node in a variable before, and typed a dot to
@@ -240,7 +240,7 @@ You can also use your own nodes as return types:
         var item: Item = find_item(reference)
         if not item:
             item = ItemDatabase.get_instance(reference)
-            
+
         item.amount += amount
         return item
 
@@ -299,7 +299,7 @@ And the same callback, with type hints:
     func _on_area_entered(area: CollisionObject2D) -> void:
         pass
 
-You're free to replace, e.g. the ``CollisionObject2D``, with your own type,
+You're free to replace, e.g. the ``CollisionObject2D``, with your own type,
 to cast parameters automatically:
 
 ::

+ 11 - 11
getting_started/step_by_step/exporting.rst

@@ -29,19 +29,19 @@ be treated the same as a touch event, we'll convert the game to a click-and-move
 input style.
 
 By default Godot emulates mouse input from touch input. That means if anything
-is coded to happen on a mouse event, touch will trigger it as well. Godot can also 
+is coded to happen on a mouse event, touch will trigger it as well. Godot can also
 emulate touch input from mouse clicks, which we will need to be able to keep playing
-our game on our computer after we switch to touch input. In the "Project Settings" 
-under *Input Devices* and *Pointing*, set *Emulate Touch From Mouse* to "On". 
+our game on our computer after we switch to touch input. In the "Project Settings"
+under *Input Devices* and *Pointing*, set *Emulate Touch From Mouse* to "On".
 
 .. image:: img/export_touchsettings.png
 
 We also want to ensure that the game scales consistently on different-sized screens,
-so in the project settings go to *Display*, then click on *Window*. In the *Stretch* 
-options, set *Mode* to "2d" and *Aspect* to "keep". 
+so in the project settings go to *Display*, then click on *Window*. In the *Stretch*
+options, set *Mode* to "2d" and *Aspect* to "keep".
 
 Since we are already in the *Window* settings, we should also set under *Handheld*
-the *Orientation* to "portrait". 
+the *Orientation* to "portrait".
 
 .. image:: img/export_handheld_stretchsettings.png
 
@@ -104,7 +104,7 @@ changed:
 
         position += velocity * delta
         # We still need to clamp the player's position here because on devices that don't
-        # match your game's aspect ratio, Godot will try to maintain it as much as possible 
+        # match your game's aspect ratio, Godot will try to maintain it as much as possible
         # by creating black borders, if necessary.
         # Without clamp(), the player would be able to move under those borders.
         position.x = clamp(position.x, 0, screen_size.x)
@@ -210,7 +210,7 @@ changed:
             // We still need to clamp the player's position here because on devices that don't
             // match your game's aspect ratio, Godot will try to maintain it as much as possible
             // by creating black borders, if necessary.
-            // Without clamp(), the player would be able to move under those borders. 
+            // Without clamp(), the player would be able to move under those borders.
             Position = new Vector2(
                 x: Mathf.Clamp(Position.x, 0, _screenSize.x),
                 y: Mathf.Clamp(Position.y, 0, _screenSize.y)
@@ -239,8 +239,8 @@ changed:
 Setting a main scene
 --------------------
 
-The main scene is the one that your game will start in. In *Project -> Project 
-Settings -> Application -> Run*, set *Main Scene* to "Main.tscn" by clicking 
+The main scene is the one that your game will start in. In *Project -> Project
+Settings -> Application -> Run*, set *Main Scene* to "Main.tscn" by clicking
 the folder icon and selecting it.
 
 Export templates
@@ -339,7 +339,7 @@ your system and the location of the keystore you just created.
 .. image:: img/export_editor_android_settings.png
 
 Now you're ready to export. Click on *Project -> Export* and add a preset
-for Android (see above). Select the Android Presets and under *Options* go to 
+for Android (see above). Select the Android Presets and under *Options* go to
 *Screen* and set *Orientation* to "Portrait".
 
 Click the "Export Project" button and Godot will build an APK you can download

+ 2 - 2
getting_started/step_by_step/intro_to_the_editor_interface.rst

@@ -116,7 +116,7 @@ You can see four workspace buttons at the top: 2D, 3D, Script and
 AssetLib.
 
 You'll use the **2D workspace** for all types of games. In addition to 2D games,
-the 2D workspace is where you'll build your interfaces. Press :kbd:`F1` 
+the 2D workspace is where you'll build your interfaces. Press :kbd:`F1`
 (or :kbd:`Alt + 1` on macOS) to access it.
 
 .. image:: img/editor_ui_intro_editor_04_2d_workspace.png
@@ -134,7 +134,7 @@ related to the 3D viewport.
 .. note:: Read :ref:`doc_introduction_to_3d` for more detail about **3D workspace**.
 
 The **Script** workspace is a complete code editor with a debugger, rich
-auto-completion, and built-in code reference. Press :kbd:`F3` (or :kbd:`Alt + 3` on macOS) 
+auto-completion, and built-in code reference. Press :kbd:`F3` (or :kbd:`Alt + 3` on macOS)
 to access it, and :kbd:`F4` to search the reference.
 
 .. image:: img/editor_ui_intro_editor_06_script_workspace_expanded.png

+ 1 - 1
getting_started/step_by_step/resources.rst

@@ -185,7 +185,7 @@ and :ref:`Resource <class_Resource>` features:
     Resources and Dictionaries are both passed by reference, but only Resources are
     reference-counted. This means that if a Dictionary is passed between objects and
     the first object is deleted, all other objects' references to the Dictionary will
-    be invalidated. Conversely, Resources will not be freed from memory until *all* the 
+    be invalidated. Conversely, Resources will not be freed from memory until *all* the
     objects are deleted.
 
     .. tabs::

+ 2 - 2
getting_started/workflow/assets/escn_exporter/animation.rst

@@ -18,8 +18,8 @@ to use this feature:
 
 **1. Stash active action**
 
-New created action is always an active action bound to object. There are 
-several ways to place an active action into NLA track, 
+New created action is always an active action bound to object. There are
+several ways to place an active action into NLA track,
 one is of course doing it in ``NLA Editor``
 
 .. image:: img/nla_editor.jpg

+ 0 - 1
getting_started/workflow/assets/escn_exporter/lights.rst

@@ -24,4 +24,3 @@ There are some things to note:
    The exporter attempts to make them similar, but it doesn't always look the
    same.
  - There is no difference between buffer shadow and ray shadow in the export.
-

+ 2 - 2
getting_started/workflow/assets/escn_exporter/material.rst

@@ -37,7 +37,7 @@ Export of Cycles/EEVEE materials
 --------------------------------
 
 The exporter has a primitive support for converting Cycles/EEVEE material node tree
-to Godot Shader Material. Note that some of the Shader Node are not supported yet due to 
+to Godot Shader Material. Note that some of the Shader Node are not supported yet due to
 difficulties in implementation, which are:
 
 - all the ``noisy textures``
@@ -60,7 +60,7 @@ Generate external materials
 
 The default configuration of material exporting would keep all the materials internal to
 the ``escn`` file. There is an option which could enable generating external ``.material``
-file when the ``escn`` file opens in Godot. 
+file when the ``escn`` file opens in Godot.
 
 .. image:: img/external_mat_option.jpg
 

+ 1 - 1
getting_started/workflow/assets/importing_images.rst

@@ -137,7 +137,7 @@ Note that RGTC compression affects the resulting normal map image. You will have
 
   More information about normal maps (including a coordinate order table for
   popular engines) can be found
-  `here <http://wiki.polycount.com/wiki/Normal_Map_Technical_Details>`__. 
+  `here <http://wiki.polycount.com/wiki/Normal_Map_Technical_Details>`__.
 
 Flags
 -----

+ 2 - 2
getting_started/workflow/assets/importing_scenes.rst

@@ -22,7 +22,7 @@ Godot supports the following 3D *scene file formats*:
 
 Just copy the scene file together with the texture to the project repository, and Godot will do a full import.
 
-It is important that the mesh is not deformed by bones when exporting. Make sure that the skeleton is reset to its T-pose 
+It is important that the mesh is not deformed by bones when exporting. Make sure that the skeleton is reset to its T-pose
 or default rest pose before exporting with your favorite 3D editor.
 
 Exporting DAE files from Maya and 3DS Max
@@ -38,7 +38,7 @@ with the latest version of the software.
 Exporting glTF 2.0 files from Blender
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-There are three ways to export glTF files from Blender. As a glTF binary (``.glb`` file), glTF embedded (``.gltf`` file), 
+There are three ways to export glTF files from Blender. As a glTF binary (``.glb`` file), glTF embedded (``.gltf`` file),
 and with textures (``gltf`` + ``.bin`` + textures).
 
 glTF binary files are the smallest of the three options. They include the mesh and textures set up in Blender.

+ 1 - 1
getting_started/workflow/best_practices/godot_interfaces.rst

@@ -237,7 +237,7 @@ Nodes likewise have an alternative access point: the SceneTree.
             GD.Print(globals.my_getter());
         }
     };
-    
+
 .. _doc_accessing_data_or_logic_from_object:
 
 Accessing data or logic from an object

+ 1 - 1
tutorials/2d/2d_meshes.rst

@@ -22,7 +22,7 @@ Optimizing pixels drawn
 
 This workflow is useful for optimizing 2D drawing in some situations. When drawing large images with transparency, Godot will draw the whole quad to the screen. The large transparent areas will still be drawn.
 
-This can affect performance, especially on mobile devices, when drawing very large images (generally screen sized), 
+This can affect performance, especially on mobile devices, when drawing very large images (generally screen sized),
 or layering multiple images on top of each other with large transparent areas (for example, when using ``ParallaxBackground``).
 
 Converting to a mesh will ensure that only the opaque parts will be drawn and the rest will be ignored.

+ 2 - 2
tutorials/2d/2d_sprite_animation.rst

@@ -16,7 +16,7 @@ animate a collection of individual images. Then we will animate a sprite sheet u
 with :ref:`AnimationPlayer <class_AnimationPlayer>` and the *Animation*
 property of :ref:`Sprite <class_Sprite>`.
 
-.. note:: Art for the following examples by https://opengameart.org/users/ansimuz and by 
+.. note:: Art for the following examples by https://opengameart.org/users/ansimuz and by
                                            https://opengameart.org/users/tgfcoder
 
 Individual images with AnimatedSprite
@@ -102,7 +102,7 @@ Click on the new SpriteFrames resource.  This time, when the bottom panel appear
 
 .. image:: img/2d_animation_add_from_spritesheet.png
 
-You will be prompted to open a file.  Select your sprite sheet.  
+You will be prompted to open a file.  Select your sprite sheet.
 
 A new window will open, showing your sprite sheet.  The first thing you will need to do is to change the number of vertical and horizontal images in your sprite sheet.  In this sprite sheet, we have four images horizontally and two images vertically.
 

+ 1 - 1
tutorials/2d/canvas_layers.rst

@@ -59,7 +59,7 @@ CanvasLayers are independent of tree order, and they only depend on
 their layer number, so they can be instantiated when needed.
 
 .. note::   CanvasLayers aren't necessary to control the drawing order of nodes.
-            The standard way to ensuring that a node is  correctly drawn 'in front' or 'behind' others is to manipulate the  
+            The standard way to ensuring that a node is  correctly drawn 'in front' or 'behind' others is to manipulate the
             order of the nodes in the scene panel. Perhaps counterintuitively, the topmost nodes in the scene panel are drawn
             on *behind* lower ones in the viewport. 2d nodes also have a property for controlling their drawing order
             (see :ref:`Node2D.z_index <class_Node2D_property_z_index>`).

+ 11 - 11
tutorials/3d/fps_tutorial/part_one.rst

@@ -482,9 +482,9 @@ If you want to move using the world space directional vectors, you'd do somethin
         node.translate(Vector3(1, 0, 0))
     if Input.is_action_pressed("movement_right"):
         node.translate(Vector3(-1, 0, 0))
-        
+
  .. code-tab:: csharp
- 
+
     if (Input.IsActionPressed("movement_forward"))
         node.Translate(new Vector3(0, 0, 1));
     if (Input.IsActionPressed("movement_backward"))
@@ -542,9 +542,9 @@ To use the :ref:`Spatial <class_Spatial>` node's local directional vectors, we u
         node.translate(node.global_transform.basis.x.normalized())
     if Input.is_action_pressed("movement_right"):
         node.translate(-node.global_transform.basis.x.normalized())
-        
+
  .. code-tab:: csharp
-        
+
     if (Input.IsActionPressed("movement_forward"))
         node.Translate(node.GlobalTransform.basis.z.Normalized());
     if (Input.IsActionPressed("movement_backward"))
@@ -711,7 +711,7 @@ First we need a few more class variables in our player script:
     [Export]
     public float SprintAccel = 18.0f;
     private bool _isSprinting = false;
-    
+
     private SpotLight _flashlight;
 
 All the sprinting variables work exactly the same as the non sprinting variables with
@@ -728,7 +728,7 @@ Now we need to add a few lines of code, starting in ``_ready``. Add the followin
     flashlight = $Rotation_Helper/Flashlight
 
  .. code-tab:: csharp
- 
+
     _flashlight = GetNode<SpotLight>("Rotation_Helper/Flashlight");
 
 This gets the ``Flashlight`` node and assigns it to the ``flashlight`` variable.
@@ -758,7 +758,7 @@ Now we need to change some of the code in ``process_input``. Add the following s
     # ----------------------------------
 
  .. code-tab:: csharp
- 
+
     //  -------------------------------------------------------------------
     //  Sprinting
     if (Input.IsActionPressed("movement_sprint"))
@@ -799,12 +799,12 @@ Now we need to change a couple things in ``process_movement``. First, replace ``
         target *= MAX_SPEED
 
  .. code-tab:: csharp
- 
+
     if (_isSprinting)
-        target *= MaxSprintSpeed;    
+        target *= MaxSprintSpeed;
     else
         target *= MaxSpeed;
-            
+
 Now instead of always multiplying ``target`` by ``MAX_SPEED``, we first check to see if the player is sprinting or not.
 If the player is sprinting, we instead multiply ``target`` by ``MAX_SPRINT_SPEED``.
 
@@ -819,7 +819,7 @@ Now all that's left is to change the acceleration when sprinting. Change ``accel
         accel = ACCEL
 
  .. code-tab:: csharp
- 
+
     if (_isSprinting)
         accel = SprintAccel;
     else

+ 2 - 3
tutorials/3d/fps_tutorial/part_three.rst

@@ -23,10 +23,10 @@ Changing levels
 
 Now that we have a fully working FPS, let's move to a more FPS-like level.
 
-Open up ``Space_Level.tscn`` (``assets/Space_Level_Objects/Space_Level.tscn``) 
+Open up ``Space_Level.tscn`` (``assets/Space_Level_Objects/Space_Level.tscn``)
 and/or ``Ruins_Level.tscn`` (``assets/Ruin_Level_Objects/Ruins_Level.tscn``).
 
-``Space_Level.tscn`` and ``Ruins_Level.tscn`` are complete custom FPS levels 
+``Space_Level.tscn`` and ``Ruins_Level.tscn`` are complete custom FPS levels
 created for the purpose of this tutorial. Press ``Play Current Scene`` button,
 or :kbd:`F6` on keyboard, and give each a try.
 
@@ -683,4 +683,3 @@ We're also going to add joypad support, so we can play with wired Xbox 360 contr
 .. warning:: If you ever get lost, be sure to read over the code again!
 
              You can download the finished project for this part here: :download:`Godot_FPS_Part_3.zip <files/Godot_FPS_Part_3.zip>`
-

+ 2 - 2
tutorials/3d/fps_tutorial/part_two.rst

@@ -291,10 +291,10 @@ reach the point where the muzzle starts to flash.
 
 .. note:: The timeline is the window where all the points in our animation are stored. Each of the little
           points represents a point of animation data.
-          
+
           To actually preview the "Pistol_fire" animation, select the :ref:`Camera <class_Camera>` node
           underneath Rotation Helper and check the "Preview" box underneath Perspective in the top-left corner.
-            
+
 
           Scrubbing the timeline means moving ourselves through the animation. So when we say "scrub the timeline
           until you reach a point", what we mean is move through the animation window until you reach the point

+ 1 - 1
tutorials/3d/standard_material_3d.rst

@@ -373,7 +373,7 @@ and wider compatibility.
 
   More information about normal maps (including a coordinate order table for
   popular engines) can be found
-  `here <http://wiki.polycount.com/wiki/Normal_Map_Technical_Details>`__. 
+  `here <http://wiki.polycount.com/wiki/Normal_Map_Technical_Details>`__.
 
 Rim
 ~~~

+ 1 - 1
tutorials/3d/using_gridmaps.rst

@@ -85,7 +85,7 @@ The "Cell/Size" property should be set to the size of your meshes. You can leave
 it at the default value for the demo. Set the "Center Y" property to "Off".
 
 Now you can start designing the level by choosing a tile from the palette and
-placing it with Left-Click in the editor window. To remove a tile, hold :kbd:`Shift` 
+placing it with Left-Click in the editor window. To remove a tile, hold :kbd:`Shift`
 and use Right-click.
 
 Click on the "GridMap" menu to see options and shortcuts. For example, pressing

+ 54 - 54
tutorials/3d/vertex_animation/animating_thousands_of_fish.rst

@@ -3,34 +3,34 @@
 Animating thousands of fish with MultiMeshInstance
 ==================================================
 
-This tutorial explores a technique used in the game `ABZU <https://www.gdcvault.com/play/1024409/Creating-the-Art-of-ABZ>`_ 
-for rendering and animating thousands of fish using vertex animation and 
+This tutorial explores a technique used in the game `ABZU <https://www.gdcvault.com/play/1024409/Creating-the-Art-of-ABZ>`_
+for rendering and animating thousands of fish using vertex animation and
 static mesh instancing.
 
-In Godot, this can be accomplished with a custom :ref:`Shader <class_Shader>` and 
-a :ref:`MultiMeshInstance <class_MultiMeshInstance>`. Using the following technique you 
+In Godot, this can be accomplished with a custom :ref:`Shader <class_Shader>` and
+a :ref:`MultiMeshInstance <class_MultiMeshInstance>`. Using the following technique you
 can render thousands of animated objects, even on low end hardware.
 
-We will start by animating one fish. Then, we will see how to extend that animation to 
+We will start by animating one fish. Then, we will see how to extend that animation to
 thousands of fish.
 
 Animating one Fish
 ------------------
 
-We will start with a single fish. Load your fish model into a :ref:`MeshInstance <class_MeshInstance>` 
+We will start with a single fish. Load your fish model into a :ref:`MeshInstance <class_MeshInstance>`
 and add a new :ref:`ShaderMaterial <class_ShaderMaterial>`.
 
 Here is the fish we will be using for the example images, you can use any fish model you like.
 
 .. image:: img/fish.png
 
-.. note:: The fish model in this tutorial is made by `QuaterniusDev <http://quaternius.com>`_ and is 
-          shared with a creative commons license. CC0 1.0 Universal (CC0 1.0) Public Domain 
-          Dedication https://creativecommons.org/publicdomain/zero/1.0/ 
-          
-Typically, you would use bones and a :ref:`Skeleton <class_Skeleton>` to animate objects. However, 
-bones are animated on the CPU and so you end having to calculate thousands of operations every 
-frame and it becomes impossible to have thousands of objects. Using vertex animation in a vertex 
+.. note:: The fish model in this tutorial is made by `QuaterniusDev <http://quaternius.com>`_ and is
+          shared with a creative commons license. CC0 1.0 Universal (CC0 1.0) Public Domain
+          Dedication https://creativecommons.org/publicdomain/zero/1.0/
+
+Typically, you would use bones and a :ref:`Skeleton <class_Skeleton>` to animate objects. However,
+bones are animated on the CPU and so you end having to calculate thousands of operations every
+frame and it becomes impossible to have thousands of objects. Using vertex animation in a vertex
 shader, you avoid using bones and can instead calculate the full animation in a few lines of code
 and completely on the GPU.
 
@@ -46,9 +46,9 @@ We use uniforms to control the strength of the motion so that you can tweak the
 results in real time, without the shader having to recompile.
 
 All the motions will be made using cosine waves applied to ``VERTEX`` in model space. We want the vertices to
-be in model space so that the motion is always relative to the orientation of the fish. For example, side-to-side 
+be in model space so that the motion is always relative to the orientation of the fish. For example, side-to-side
 will always move the fish back and forth in its left to right direction, instead of on the ``x`` axis in the
-world orientation. 
+world orientation.
 
 In order to control the speed of the animation, we will start by defining our own time variable using ``TIME``.
 
@@ -57,7 +57,7 @@ In order to control the speed of the animation, we will start by defining our ow
   //time_scale is a uniform float
   float time = TIME * time_scale;
 
-The first motion we will implement is the side to side motion. It can be made by offsetting ``VERTEX.x`` by 
+The first motion we will implement is the side to side motion. It can be made by offsetting ``VERTEX.x`` by
 ``cos`` of ``TIME``. Each time the mesh is rendered, all the vertices will move to the side by the amount
 of ``cos(time)``.
 
@@ -70,7 +70,7 @@ The resulting animation should look something like this:
 
 .. image:: img/sidetoside.gif
 
-Next, we add the pivot. Because the fish is centered at (0, 0), all we have to do is multiply ``VERTEX`` by a 
+Next, we add the pivot. Because the fish is centered at (0, 0), all we have to do is multiply ``VERTEX`` by a
 rotation matrix for it to rotate around the center of the fish.
 
 We construct a rotation matrix like so:
@@ -92,8 +92,8 @@ With only the pivot applied you should see something like this:
 
 .. image:: img/pivot.gif
 
-The next two motions need to pan down the spine of the fish. For that, we need a new variable, ``body``. 
-``body`` is a float that is ``0`` at the tail of the fish and ``1`` at its head. 
+The next two motions need to pan down the spine of the fish. For that, we need a new variable, ``body``.
+``body`` is a float that is ``0`` at the tail of the fish and ``1`` at its head.
 
 .. code-block:: glsl
 
@@ -111,11 +111,11 @@ along the spine, which is the variable we defined above, ``body``.
 This looks very similar to the side to side motion we defined above, but in this one, by
 using ``body`` to offset ``cos`` each vertex along the spine has a different position in
 the wave making it look like a wave is moving along the fish.
- 
+
 .. image:: img/wave.gif
 
 The last motion is the twist, which is a panning roll along the spine. Similarly to the pivot,
-we first  construct a rotation matrix. 
+we first  construct a rotation matrix.
 
 .. code-block:: glsl
 
@@ -123,7 +123,7 @@ we first  construct a rotation matrix.
   float twist_angle = cos(time + body) * 0.3 * twist;
   mat2 twist_matrix = mat2(vec2(cos(twist_angle), -sin(twist_angle)), vec2(sin(twist_angle), cos(twist_angle)));
 
-We apply the rotation in the ``xy`` axes so that the fish appears to roll around its spine. For 
+We apply the rotation in the ``xy`` axes so that the fish appears to roll around its spine. For
 this to work, the fish's spine needs to be centered on the ``z`` axis.
 
 .. code-block:: glsl
@@ -138,7 +138,7 @@ If we apply all these motions one after another, we get a fluid jelly-like motio
 
 .. image:: img/all_motions.gif
 
-Normal fish swim mostly with the back half of their body. Accordingly, we need to limit the 
+Normal fish swim mostly with the back half of their body. Accordingly, we need to limit the
 panning motions to the back half of the fish. To do this, we create a new variable, ``mask``.
 
 ``mask`` is a float that goes from ``0`` at the front of the fish to ``1`` at the end  using
@@ -160,10 +160,10 @@ For the wave, we multiply the motion by ``mask`` which will limit it to the back
   //wave motion with mask
   VERTEX.x += cos(time + body) * mask * wave;
 
-In order to apply the mask to the twist, we use ``mix``. ``mix`` allows us to mix the 
-vertex position between a fully rotated vertex and one that is not rotated. We need to 
+In order to apply the mask to the twist, we use ``mix``. ``mix`` allows us to mix the
+vertex position between a fully rotated vertex and one that is not rotated. We need to
 use ``mix`` instead of multiplying ``mask`` by the rotated ``VERTEX`` because we are not
-adding the motion to the ``VERTEX`` we are replacing the ``VERTEX`` with the rotated 
+adding the motion to the ``VERTEX`` we are replacing the ``VERTEX`` with the rotated
 version. If we multiplied that by ``mask``, we would shrink the fish.
 
 .. code-block:: glsl
@@ -181,44 +181,44 @@ find that you can create a wide variety of swim styles using these four motions.
 Making a school of fish
 -----------------------
 
-Godot makes it easy to render thousands of the same object using a MultiMeshInstance node. 
+Godot makes it easy to render thousands of the same object using a MultiMeshInstance node.
 
-A MultiMeshInstance node is created and used the same way you would make a MeshInstance node. 
-For this tutorial, we will name the MultiMeshInstance node ``School``, because it will contain 
+A MultiMeshInstance node is created and used the same way you would make a MeshInstance node.
+For this tutorial, we will name the MultiMeshInstance node ``School``, because it will contain
 a school of fish.
 
-Once you have a MultiMeshInstance add a :ref:`MultiMesh <class_MultiMesh>`, and to that 
+Once you have a MultiMeshInstance add a :ref:`MultiMesh <class_MultiMesh>`, and to that
 MultiMesh add your :ref:`Mesh <class_Mesh>` with the shader from above.
 
-MultiMeshes draw your Mesh with three additional per-instance properties: Transform (rotation, 
-translation, scale), Color, and Custom. Custom is used to pass in 4 multi-use variables using 
+MultiMeshes draw your Mesh with three additional per-instance properties: Transform (rotation,
+translation, scale), Color, and Custom. Custom is used to pass in 4 multi-use variables using
 a :ref:`Color <class_Color>`.
 
-``instance_count`` specifies how many instances of the mesh you want to draw. For now, leave 
-``instance_count`` at ``0`` because you cannot change any of the other parameters while 
+``instance_count`` specifies how many instances of the mesh you want to draw. For now, leave
+``instance_count`` at ``0`` because you cannot change any of the other parameters while
 ``instance_count`` is larger than ``0``. We will set ``instance count`` in GDScript later.
 
 ``transform_format`` specifies whether the transforms used are 3D or 2D. For this tutorial, select 3D.
 
-For both ``color_format`` and ``custom_data_format`` you can choose between ``None``, ``Byte``, and 
-``Float``. ``None`` means you won't be passing in that data (either a per-instance ``COLOR`` variable, 
-or ``INSTANCE_CUSTOM``) to the shader. ``Byte`` means each number making up the color you pass in will 
-be stored with 8 bits while ``Float`` means each number will be stored in a floating-point number 
-(32 bits). ``Float`` is slower but more precise, ``Byte`` will take less memory and be faster, but you 
-may see some visual artifacts. 
+For both ``color_format`` and ``custom_data_format`` you can choose between ``None``, ``Byte``, and
+``Float``. ``None`` means you won't be passing in that data (either a per-instance ``COLOR`` variable,
+or ``INSTANCE_CUSTOM``) to the shader. ``Byte`` means each number making up the color you pass in will
+be stored with 8 bits while ``Float`` means each number will be stored in a floating-point number
+(32 bits). ``Float`` is slower but more precise, ``Byte`` will take less memory and be faster, but you
+may see some visual artifacts.
 
 Now, set ``instance_count`` to the number of fish you want to have.
 
 Next we need to set the per-instance transforms.
 
-There are two ways to set per-instance transforms for MultiMeshes. The first is entirely in editor 
+There are two ways to set per-instance transforms for MultiMeshes. The first is entirely in editor
 and is described in the :ref:`MultiMeshInstance tutorial <doc_using_multi_mesh_instance>`.
 
 The second is to loop over all the instances and set their transforms in code. Below, we use GDScript
-to loop over all the instances and set their transform to a random position. 
+to loop over all the instances and set their transform to a random position.
 
 ::
-  
+
   for i in range($School.multimesh.instance_count):
     var position = Transform()
     position = position.translated(Vector3(randf() * 100 - 50, randf() * 50 - 25, randf() * 50 - 25))
@@ -229,15 +229,15 @@ MultiMeshInstance.
 
 .. note:: If performance is an issue for you, try running the scene with GLES2 or with fewer fish.
 
-Notice how all the fish  are all in the same position in their swim cycle? It makes them look very 
-robotic. The next step is to give each fish a different position in the swim cycle so the entire 
+Notice how all the fish  are all in the same position in their swim cycle? It makes them look very
+robotic. The next step is to give each fish a different position in the swim cycle so the entire
 school looks more organic.
 
 Animating a school of fish
 --------------------------
 
 One of the benefits of animating the fish using ``cos`` functions is that they are animated with
-one parameter, ``time``. In order to give each fish a unique position in the 
+one parameter, ``time``. In order to give each fish a unique position in the
 swim cycle, we only need to offset ``time``.
 
 We do that by adding the per-instance custom value ``INSTANCE_CUSTOM`` to ``time``.
@@ -246,16 +246,16 @@ We do that by adding the per-instance custom value ``INSTANCE_CUSTOM`` to ``time
 
   float time = (TIME * time_scale) + (6.28318 * INSTANCE_CUSTOM.x);
 
-Next, we need to pass a value into ``INSTANCE_CUSTOM``. We do that by adding one line into 
-the ``for`` loop from above. In the ``for`` loop we assign each instance a set of four 
-random floats to use. 
+Next, we need to pass a value into ``INSTANCE_CUSTOM``. We do that by adding one line into
+the ``for`` loop from above. In the ``for`` loop we assign each instance a set of four
+random floats to use.
 
 ::
-  
+
   $School.multimesh.set_instance_custom_data(i, Color(randf(), randf(), randf(), randf()))
 
-Now the fish all have unique positions in the swim cycle. You can give them a little more 
-individuality by using ``INSTANCE_CUSTOM`` to make them swim faster or slower by multiplying 
+Now the fish all have unique positions in the swim cycle. You can give them a little more
+individuality by using ``INSTANCE_CUSTOM`` to make them swim faster or slower by multiplying
 by ``TIME``.
 
 .. code-block:: glsl
@@ -268,8 +268,8 @@ custom value.
 
 One problem that you will run into at this point is that the fish are animated, but they are not
 moving. You can move them by updating the per-instance transform for each fish every frame. Although
-doing so will be faster than moving thousands of MeshInstances per frame, it'll still likely be 
+doing so will be faster than moving thousands of MeshInstances per frame, it'll still likely be
 slow.
 
-In the next tutorial we will cover how to use :ref:`Particles <class_Particles>` to take advantage 
+In the next tutorial we will cover how to use :ref:`Particles <class_Particles>` to take advantage
 of the GPU and move each fish around individually while still receiving the benefits of instancing.

+ 20 - 20
tutorials/3d/vertex_animation/controlling_thousands_of_fish.rst

@@ -3,25 +3,25 @@
 Controlling thousands of fish with Particles
 ============================================
 
-The problem with :ref:`MeshInstances <class_MeshInstance>` is that it is expensive to 
-update their transform array. It is great for placing many static objects around the 
-scene. But it is still difficult to move the objects around the scene. 
+The problem with :ref:`MeshInstances <class_MeshInstance>` is that it is expensive to
+update their transform array. It is great for placing many static objects around the
+scene. But it is still difficult to move the objects around the scene.
 
-To make each instance move in an interesting way, we will use a 
-:ref:`Particles <class_Particles>` node. Particles take advantage of GPU acceleration 
+To make each instance move in an interesting way, we will use a
+:ref:`Particles <class_Particles>` node. Particles take advantage of GPU acceleration
 by computing and setting the per-instance information in a :ref:`Shader <class_Shader>`.
 
-.. note:: Particles are not available in GLES2, instead use :ref:`CPUParticles <class_CPUParticles>`, 
-          which do the same thing as Particles, but do not benefit from GPU acceleration. 
+.. note:: Particles are not available in GLES2, instead use :ref:`CPUParticles <class_CPUParticles>`,
+          which do the same thing as Particles, but do not benefit from GPU acceleration.
 
-First create a Particles node. Then, under "Draw Passes" set the Particle's "Draw Pass 1" to your 
-:ref:`Mesh <class_Mesh>`. Then under "Process Material" create a new 
+First create a Particles node. Then, under "Draw Passes" set the Particle's "Draw Pass 1" to your
+:ref:`Mesh <class_Mesh>`. Then under "Process Material" create a new
 :ref:`ShaderMaterial <class_ShaderMaterial>`.
 
 Set the ``shader_type`` to ``particles``.
 
 .. code-block:: glsl
-  
+
   shader_type particles
 
 Then add the following two functions:
@@ -38,7 +38,7 @@ Then add the following two functions:
     if (s < 0)
       s += 2147483647;
     seed = uint(s);
-    return float(seed % uint(65536)) / 65535.0; 
+    return float(seed % uint(65536)) / 65535.0;
   }
 
   uint hash(uint x) {
@@ -48,19 +48,19 @@ Then add the following two functions:
     return x;
   }
 
-These functions come from the default :ref:`ParticlesMaterial <class_ParticlesMaterial>`. 
+These functions come from the default :ref:`ParticlesMaterial <class_ParticlesMaterial>`.
 They are used to generate a random number from  each particle's ``RANDOM_SEED``.
 
 A unique thing about particle shaders is that some built-in variables are saved across frames.
 ``TRANSFORM``, ``COLOR``, and ``CUSTOM`` can all be accessed in the Spatial shader of the mesh, and
-also in the particle shader the next time it is run. 
+also in the particle shader the next time it is run.
 
 Next, setup your ``vertex`` function. Particles shaders only contain a vertex function
 and no others.
 
 First we will distinguish between code that needs to be run only when the particle system starts
 and code that should always run. We want to give each fish a random position and a random animation
-offset when the system is first run. To do so, we wrap that code in an ``if`` statement that checks the 
+offset when the system is first run. To do so, we wrap that code in an ``if`` statement that checks the
 built-in variable ``RESTART`` which becomes ``true`` for one frame when the particle system is restarted.
 
 From a high level, this looks like:
@@ -92,8 +92,8 @@ Then, use those seeds to generate random numbers using ``rand_from_seed``:
 .. code-block:: glsl
 
   CUSTOM.x = rand_from_seed(alt_seed1);
-  vec3 position = vec3(rand_from_seed(alt_seed2) * 2.0 - 1.0, 
-                       rand_from_seed(alt_seed3) * 2.0 - 1.0, 
+  vec3 position = vec3(rand_from_seed(alt_seed2) * 2.0 - 1.0,
+                       rand_from_seed(alt_seed3) * 2.0 - 1.0,
                        rand_from_seed(alt_seed4) * 2.0 - 1.0);
 
 Finally, assign ``position`` to ``TRANSFORM[3].xyz``, which is the part of the transform that holds
@@ -119,19 +119,19 @@ Let's transform the fish by setting their ``VELOCITY``.
 This is the most basic way to set ``VELOCITY`` every particle (or fish) will have the same velocity.
 
 Just by setting ``VELOCITY`` you can make the fish swim however you want. For example, try the code
-below. 
+below.
 
 .. code-block:: glsl
 
   VELOCITY.z = cos(TIME + CUSTOM.x * 6.28) * 4.0 + 6.0;
 
-This will give each fish a unique speed between ``2`` and ``10``. 
+This will give each fish a unique speed between ``2`` and ``10``.
 
 If you used ``CUSTOM.y`` in the last tutorial, you can also set the speed of the swim animation based
 on the ``VELOCITY``. Just use ``CUSTOM.y``.
 
 .. code-block:: glsl
-  
+
   CUSTOM.y = VELOCITY.z * 0.1;
 
 This code gives you the following behavior:
@@ -139,6 +139,6 @@ This code gives you the following behavior:
 .. image:: img/scene.gif
 
 Using a ParticlesMaterial you can make the fish behavior as simple or complex as you like. In this
-tutorial we only set Velocity, but in your own Shaders you can also set ``COLOR``, rotation, scale 
+tutorial we only set Velocity, but in your own Shaders you can also set ``COLOR``, rotation, scale
 (through ``TRANSFORM``). Please refer to the :ref:`Particles Shader Reference <doc_particle_shader>`
 for more information on particle shaders.

+ 1 - 1
tutorials/animation/cutout_animation.rst

@@ -32,7 +32,7 @@ Godot provides tools for working with cutout rigs, and is ideal for the workflow
    means animations can control much more than just motion of objects. Textures,
    sprite sizes, pivots, opacity, color modulation, and more, can all be animated
    and blended.
--  **Combine animation styles**: AnimatedSprite allows traditional cel animation 
+-  **Combine animation styles**: AnimatedSprite allows traditional cel animation
    to be used alongside cutout animation. In cel animation different animation
    frames use entirely different drawings rather than the same pieces positioned
    differently. In an otherwise cutout-based animation, cel animation can be used

+ 2 - 2
tutorials/animation/introduction_2d.rst

@@ -17,8 +17,8 @@ In this guide you learn to:
 -  Call functions with the powerful Call Function Tracks
 
 In Godot, you can animate anything available in the Inspector, such as
-Node transforms, sprites, UI elements, particles, visibility and color 
-of materials, and so on. You can also modify values of script variables 
+Node transforms, sprites, UI elements, particles, visibility and color
+of materials, and so on. You can also modify values of script variables
 and call any function.
 
 Create an AnimationPlayer node

+ 0 - 2
tutorials/audio/index.rst

@@ -9,5 +9,3 @@ Audio
    audio_streams
    sync_with_audio
    recording_with_microphone
-
-

+ 2 - 2
tutorials/audio/recording_with_microphone.rst

@@ -29,7 +29,7 @@ An ``AudioStreamPlayer`` named ``AudioStreamRecord`` is used for recording.
 
     var effect
     var recording
-    
+
     func _ready():
         # We get the index of the "Record" bus.
         var idx = AudioServer.get_bus_index("Record")
@@ -42,7 +42,7 @@ which has three methods:
 :ref:`get_recording() <class_AudioEffectRecord_method_get_recording>`,
 :ref:`is_recording_active() <class_AudioEffectRecord_method_is_recording_active>`,
 and :ref:`set_recording_active() <class_AudioEffectRecord_method_set_recording_active>`.
-        
+
 .. tabs::
   .. code-tab:: gdscript GDScript
 

+ 0 - 1
tutorials/content/index.rst

@@ -7,4 +7,3 @@ Creating content
 
    procedural_geometry/index
    making_trees
-

+ 2 - 2
tutorials/content/procedural_geometry/arraymesh.rst

@@ -10,7 +10,7 @@ which takes up to four parameters. The first two are required, while the second
 
 The first is the ``PrimitiveType``, this is an OpenGL concept that instructs the GPU
 how to arrange the primitive based on the vertices given whether it is triangles,
-lines, points, etc. A complete list can be found under the :ref:`Mesh <class_mesh>` 
+lines, points, etc. A complete list can be found under the :ref:`Mesh <class_mesh>`
 class reference page.
 
 The second is the actual Array that stores the mesh information. The array is a normal Godot array that
@@ -202,7 +202,7 @@ that you find online.
 
 Combined with the code above, this code will generate a sphere.
 
-When it comes to generating geometry with the ArrayMesh you need to understand what goes 
+When it comes to generating geometry with the ArrayMesh you need to understand what goes
 in each array and then you can follow tutorials for any language/engine and convert it into Godot.
 
 Saving

+ 26 - 26
tutorials/content/procedural_geometry/index.rst

@@ -2,8 +2,8 @@ Procedural geometry
 ===================
 
 There are many ways to procedurally generate geometry in Godot. In this tutorial series
-we will explore a few of them. Each technique has its own benefits and drawbacks, so 
-it is best to understand each one and how it can be useful in a given situation. 
+we will explore a few of them. Each technique has its own benefits and drawbacks, so
+it is best to understand each one and how it can be useful in a given situation.
 
 .. toctree::
    :maxdepth: 1
@@ -23,19 +23,19 @@ by an array of positions called "vertices". In Godot, geometry is represented by
 What is a Mesh?
 ---------------
 
-Many things in Godot have mesh in their name: the :ref:`Mesh <class_Mesh>`, the :ref:`ArrayMesh <class_ArrayMesh>`, 
-the :ref:`MeshInstance <class_MeshInstance>`, the :ref:`MultiMesh <class_MultiMesh>`, and 
-the :ref:`MultiMeshInstance <class_MultiMeshInstance>`. While they are all related, they have slightly different uses. 
+Many things in Godot have mesh in their name: the :ref:`Mesh <class_Mesh>`, the :ref:`ArrayMesh <class_ArrayMesh>`,
+the :ref:`MeshInstance <class_MeshInstance>`, the :ref:`MultiMesh <class_MultiMesh>`, and
+the :ref:`MultiMeshInstance <class_MultiMeshInstance>`. While they are all related, they have slightly different uses.
 
-Meshes and ArrayMeshes are resources that are drawn using a MeshInstance node. Resources like 
-Meshes and ArrayMeshes cannot be added to the scene directly. A MeshInstance represents one 
-instance of a mesh in your scene. You can reuse a single mesh in multiple MeshInstances 
-to draw it in different parts of your scene with different materials or transformations (scale, 
-rotation, position etc.). 
+Meshes and ArrayMeshes are resources that are drawn using a MeshInstance node. Resources like
+Meshes and ArrayMeshes cannot be added to the scene directly. A MeshInstance represents one
+instance of a mesh in your scene. You can reuse a single mesh in multiple MeshInstances
+to draw it in different parts of your scene with different materials or transformations (scale,
+rotation, position etc.).
 
-If you are going to draw the same object many times, it can be helpful to use a MultiMesh with 
-a MultiMeshInstance. The MultiMeshInstance draws meshes thousands of times very 
-cheaply. It takes advantage of hardware instancing in order to do so. The drawback with 
+If you are going to draw the same object many times, it can be helpful to use a MultiMesh with
+a MultiMeshInstance. The MultiMeshInstance draws meshes thousands of times very
+cheaply. It takes advantage of hardware instancing in order to do so. The drawback with
 using a MultiMeshInstance is that you are limited to one material for all instances. It uses an
 instance array to store different colors and transformations for each instance, but all the
 instances use the same material.
@@ -58,15 +58,15 @@ Surface array
 ^^^^^^^^^^^^^
 
 The surface array is an array of length ``ArrayMesh.ARRAY_MAX``. Each position in the array is
-filled with a sub-array containing per-vertex information. For example, the array located at 
-``ArrayMesh.ARRAY_NORMAL`` is a :ref:`PackedVector3Array <class_PackedVector3Array>` of vertex normals. 
+filled with a sub-array containing per-vertex information. For example, the array located at
+``ArrayMesh.ARRAY_NORMAL`` is a :ref:`PackedVector3Array <class_PackedVector3Array>` of vertex normals.
 
 The surface array can be indexed or non-indexed. Creating a non-indexed array is as easy as not assigning
 an array at the index ``ArrayMesh.ARRAY_INDEX``. A non-indexed array stores unique vertex information for
-every triangle, meaning that when two triangle share a vertex, the vertex is duplicated in the array. An 
-indexed surface array only stores vertex information for each unique vertex and then also stores an array 
-of indices which maps out how to construct the triangles from the vertex array. In general, using an indexed 
-array is faster, but it means you have to share vertex data between triangles, which is not always desired 
+every triangle, meaning that when two triangle share a vertex, the vertex is duplicated in the array. An
+indexed surface array only stores vertex information for each unique vertex and then also stores an array
+of indices which maps out how to construct the triangles from the vertex array. In general, using an indexed
+array is faster, but it means you have to share vertex data between triangles, which is not always desired
 (e.g. when you want per-face normals).
 
 Tools
@@ -78,7 +78,7 @@ be provided in the following tutorials.
 ArrayMesh
 ^^^^^^^^^
 
-The ArrayMesh resource extends Mesh to add a few different quality of life functions, and most 
+The ArrayMesh resource extends Mesh to add a few different quality of life functions, and most
 importantly, the ability to construct a Mesh surface through scripting.
 
 For more information about the ArrayMesh, please see the :ref:`ArrayMesh tutorial <doc_arraymesh>`.
@@ -86,7 +86,7 @@ For more information about the ArrayMesh, please see the :ref:`ArrayMesh tutoria
 MeshDataTool
 ^^^^^^^^^^^^
 
-The MeshDataTool is a resource that converts Mesh data into arrays of vertices, faces, and edges that can 
+The MeshDataTool is a resource that converts Mesh data into arrays of vertices, faces, and edges that can
 be modified at runtime.
 
 For more information about the MeshDataTool, please see the :ref:`MeshDataTool tutorial <doc_meshdatatool>`.
@@ -94,7 +94,7 @@ For more information about the MeshDataTool, please see the :ref:`MeshDataTool t
 SurfaceTool
 ^^^^^^^^^^^
 
-The SurfaceTool allows the creation of Meshes using an OpenGL 1.x immediate mode style interface. 
+The SurfaceTool allows the creation of Meshes using an OpenGL 1.x immediate mode style interface.
 
 For more information about the SurfaceTool, please see the :ref:`SurfaceTool tutorial <doc_surfacetool>`.
 
@@ -103,11 +103,11 @@ ImmediateGeometry
 
 ImmediateGeometry is a node that uses an immediate mode style interface (like SurfaceTool) to draw objects. The
 difference between ImmediateGeometry and the SurfaceTool is that ImmediateGeometry is a node itself that can be
-added to the scene tree and is drawn directly from the code. The SurfaceTool generates a Mesh that needs to be added 
-a MeshInstance to be seen. 
+added to the scene tree and is drawn directly from the code. The SurfaceTool generates a Mesh that needs to be added
+a MeshInstance to be seen.
 
 ImmediateGeometry is useful for prototyping because of the straightforward API, but it is slow because the geometry
-is rebuilt every frame. It is most useful for quickly adding simple geometry to debug visually (e.g. by drawing lines to 
+is rebuilt every frame. It is most useful for quickly adding simple geometry to debug visually (e.g. by drawing lines to
 visualize physics raycasts etc.).
 
 For more information about ImmediateGeometry, please see the :ref:`ImmediateGeometry tutorial <doc_immediategeometry>`.
@@ -126,7 +126,7 @@ ImmediateGeometry regenerates the mesh every frame, so it is much slower than Ar
 need the geometry to change every frame anyway it provides a much easier interface that may even be a little faster than generating
 an ArrayMesh every frame.
 
-The MeshDataTool is not fast, but it gives you access to all kinds of properties of the mesh that you don't get with the others 
+The MeshDataTool is not fast, but it gives you access to all kinds of properties of the mesh that you don't get with the others
 (edges, faces, etc.). It is incredibly useful when you need that sort of data to transform the mesh, but it is not a good idea
 to use if that information is not needed. The MeshDataTool is best used if you are going to be using an algorithm that requires
 access to the face or edge array.

+ 2 - 2
tutorials/content/procedural_geometry/meshdatatool.rst

@@ -4,10 +4,10 @@ Using the MeshDataTool
 ======================
 
 The MeshDataTool is not used to generate geometry. But it is helpful for dynamically altering geometry, for example
-if you want to write a script to tessellate, simplify, or deform meshes. 
+if you want to write a script to tessellate, simplify, or deform meshes.
 
 The MeshDataTool is not as fast as altering arrays directly using ArrayMesh. However, it provides more information
-and tools to work with meshes than the ArrayMesh does. When the MeshDataTool 
+and tools to work with meshes than the ArrayMesh does. When the MeshDataTool
 is used, it calculates mesh data that is not available in ArrayMeshes such as faces and edges, which are necessary
 for certain mesh algorithms. If you do not need this extra information then it may be better to use an ArrayMesh.
 

+ 0 - 1
tutorials/debug/index.rst

@@ -6,4 +6,3 @@ Debug
    :name: toc-learn-features-debug
 
    overview_of_debugging_tools
-

+ 3 - 3
tutorials/gui/bbcode_in_richtextlabel.rst

@@ -291,12 +291,12 @@ Matrix
         var clear_time = char_fx.env.get("clean", 2.0)
         var dirty_time = char_fx.env.get("dirty", 1.0)
         var text_span = char_fx.env.get("span", 50)
-		
+
         var value = char_fx.character
-		
+
         var matrix_time = fmod(char_fx.elapsed_time + (char_fx.absolute_index / float(text_span)), \
                                clear_time + dirty_time)
-		
+
         matrix_time = 0.0 if matrix_time < clear_time else \
                       (matrix_time - clear_time) / dirty_time
 

+ 24 - 24
tutorials/gui/gui_containers.rst

@@ -3,13 +3,13 @@
 Containers
 ==========
 
-:ref:`Anchors <doc_size_and_anchors>` are an efficient way to handle 
-different aspect ratios for basic multiple resolution handling in GUIs,  
+:ref:`Anchors <doc_size_and_anchors>` are an efficient way to handle
+different aspect ratios for basic multiple resolution handling in GUIs,
 
-For more complex user interfaces, they can become difficult to use. 
+For more complex user interfaces, they can become difficult to use.
 
 This is often the case of games, such as RPGs, online chats, tycoons or simulations. Another
-common case where more advanced layout features may be required is in-game tools (or simply just tools). 
+common case where more advanced layout features may be required is in-game tools (or simply just tools).
 
 All these situations require a more capable OS-like user interface, with advanced layout and formatting.
 For that, :ref:`Containers <class_container>` are more useful.
@@ -25,7 +25,7 @@ When a :ref:`Container <class_Container>`-derived node is used, all children :re
 own positioning ability. This means the *Container* will control their positioning and any attempt to manually alter these
 nodes will be either ignored or invalidated the next time their parent is resized.
 
-Likewise, when a *Container* derived node is resized, all its children will be re-positioned according to it, 
+Likewise, when a *Container* derived node is resized, all its children will be re-positioned according to it,
 with a behavior based on the type of container used:
 
    .. image:: img/container_example.gif
@@ -44,14 +44,14 @@ can be found by inspecting any control that is a child of a *Container*.
 
 Size flags are independent for vertical and horizontal sizing and not all containers make use of them (but most do):
 
-* **Fill**: Ensures the control *fills* the designated area within the container. No matter if 
+* **Fill**: Ensures the control *fills* the designated area within the container. No matter if
   a control *expands* or not (see below), it will only *fill* the designated area when this is toggled on (it is by default).
-* **Expand**: Attempts to use as most space as possible in the parent container (in this each axis). 
-  Controls that don't expand will be pushed away by those that do. Between those expanding, the 
+* **Expand**: Attempts to use as most space as possible in the parent container (in this each axis).
+  Controls that don't expand will be pushed away by those that do. Between those expanding, the
   amount of space they take from each other is determined by the *Ratio* (see below).
-* **Shrink Center** When expanding (and if not filling), try to remain at the center of the expanded 
+* **Shrink Center** When expanding (and if not filling), try to remain at the center of the expanded
   area (by default it remains at the left or top).
-* **Ratio** Simple ratio of how much expanded controls take up the available space in relation to each 
+* **Ratio** Simple ratio of how much expanded controls take up the available space in relation to each
   other. A control with "2", will take up twice as much available space as one with "1".
 
 Experimenting with these flags and different containers is recommended to get a better grasp on how they work.
@@ -75,7 +75,7 @@ These containers make use of the *Ratio* property for children with the *Expand*
 Grid Container
 ^^^^^^^^^^^^^^
 
-Arranges child controls in a grid layout (via :ref:`GridContainer <class_GridContainer>`, amount 
+Arranges child controls in a grid layout (via :ref:`GridContainer <class_GridContainer>`, amount
 of columns must be specified). Uses both the vertical and horizontal expand flags.
 
    .. image:: img/containers_grid.png
@@ -83,8 +83,8 @@ of columns must be specified). Uses both the vertical and horizontal expand flag
 Margin Container
 ^^^^^^^^^^^^^^^^
 
-Child controls are expanded towards the bounds of this control (via 
-:ref:`MarginContainer <class_MarginContainer>`). Padding will be added on the margins 
+Child controls are expanded towards the bounds of this control (via
+:ref:`MarginContainer <class_MarginContainer>`). Padding will be added on the margins
 depending on the theme configuration.
 
    .. image:: img/containers_margin.png
@@ -97,8 +97,8 @@ constants overrides section if desired for a single control:
 Tab Container
 ^^^^^^^^^^^^^
 
-Allows you to place several child controls stacked on top of each other (via 
-:ref:`TabContainer <class_TabContainer>`), with only the *current* one visible. 
+Allows you to place several child controls stacked on top of each other (via
+:ref:`TabContainer <class_TabContainer>`), with only the *current* one visible.
 
    .. image:: img/containers_tab.png
 
@@ -113,8 +113,8 @@ Settings such as tab placement and *StyleBox* can be modified in the *TabContain
 Split Container
 ^^^^^^^^^^^^^^^
 
-Accepts only one or two children controls, then places them side to side with a divisor 
-(via :ref:`HSplitContainer <class_HSplitContainer>` and :ref:`VSplitContainer <class_VSplitContainer>`). 
+Accepts only one or two children controls, then places them side to side with a divisor
+(via :ref:`HSplitContainer <class_HSplitContainer>` and :ref:`VSplitContainer <class_VSplitContainer>`).
 Respects both horizontal and vertical flags, as well as *Ratio*.
 
    .. image:: img/containers_split.png
@@ -127,8 +127,8 @@ The divisor can be dragged around to change the size relation between both child
 PanelContainer
 ^^^^^^^^^^^^^^
 
-Simple container that draws a *StyleBox*, then expands children to cover its whole area 
-(via :ref:`PanelContainer <class_PanelContainer>`, respecting the *StyleBox* margins). 
+Simple container that draws a *StyleBox*, then expands children to cover its whole area
+(via :ref:`PanelContainer <class_PanelContainer>`, respecting the *StyleBox* margins).
 It respects both the horizontal and vertical size flags.
 
    .. image:: img/containers_panel.png
@@ -138,9 +138,9 @@ This container is useful as top-level, or just to add custom backgrounds to sect
 ScrollContainer
 ^^^^^^^^^^^^^^^
 
-Accepts a single child node. If this node is bigger than the container, scrollbars will be added 
-to allow panning the node around (via :ref:`ScrollContainer <class_ScrollContainer>`). Both 
-vertical and horizontal size flags are respected, and the behavior can be turned on or off 
+Accepts a single child node. If this node is bigger than the container, scrollbars will be added
+to allow panning the node around (via :ref:`ScrollContainer <class_ScrollContainer>`). Both
+vertical and horizontal size flags are respected, and the behavior can be turned on or off
 per axis in the properties.
 
    .. image:: img/containers_scroll.png
@@ -155,7 +155,7 @@ As in the example above, one of the most common ways to use this container is to
 ViewportContainer
 ^^^^^^^^^^^^^^^^^
 
-This is a special control that will only accept a single *Viewport* node as child, and it will display 
+This is a special control that will only accept a single *Viewport* node as child, and it will display
 it as if it was an image (via :ref:`ViewportContainer <class_ViewportContainer>`).
 
 Creating custom Containers
@@ -175,7 +175,7 @@ to its rect size:
             for c in get_children():
                 # Fit to own size
                 fit_child_in_rect( c, Rect2( Vector2(), rect_size ) )
-	
+
     func set_some_setting():
         # Some setting changed, ask for children re-sort
         queue_sort()

+ 0 - 2
tutorials/gui/index.rst

@@ -10,5 +10,3 @@ GUI
    size_and_anchors
    gui_containers
    bbcode_in_richtextlabel
-
-

+ 3 - 3
tutorials/io/files/resource_queue.gd

@@ -98,7 +98,7 @@ func get_resource(path):
 				var pos = queue.find(res)
 				queue.remove(pos)
 				queue.insert(0, res)
-			
+
 			res = _wait_for_resource(res, path)
 			pending.erase(path)
 			_unlock("return")
@@ -116,13 +116,13 @@ func get_resource(path):
 func thread_process():
 	_wait("thread_process")
 	_lock("process")
-	
+
 	while queue.size() > 0:
 		var res = queue[0]
 		_unlock("process_poll")
 		var ret = res.poll()
 		_lock("process_check_queue")
-		
+
 		if ret == ERR_FILE_EOF || ret != OK:
 			var path = res.get_meta("path")
 			if path in pending: # Else, it was already retrieved.

+ 1 - 1
tutorials/math/beziers_and_curves.rst

@@ -30,7 +30,7 @@ change the value of ``t`` from 0 to 1.
 .. tabs::
  .. code-tab:: gdscript GDScript
 
-    func _quadratic_bezier(p0: Vector2, p1: Vector2, p2: Vector2, t: float): 
+    func _quadratic_bezier(p0: Vector2, p1: Vector2, p2: Vector2, t: float):
         var q0 = p0.linear_interpolate(p1, t)
         var q1 = p1.linear_interpolate(p2, t)
 

+ 1 - 1
tutorials/misc/jitter_stutter.rst

@@ -69,7 +69,7 @@ Linux
 
 Stutter may be visible on Desktop Linux, but this is usually associated with different video drivers and compositors.
 Nouveau drivers often exhibit this, while AMD or NVidia proprietary don't. Some compositors may also trigger this problem
-(e.g. KWin), so it is advised to try using a different one to rule it out as the cause. 
+(e.g. KWin), so it is advised to try using a different one to rule it out as the cause.
 
 There is no workaround for driver or compositor stuttering other than reporting it as an issue to the driver or compositor
 developers.

+ 11 - 12
tutorials/misc/state_design_pattern.rst

@@ -63,15 +63,15 @@ Below is the generic state, from which all other states will inherit.
         func move_right():
             pass
 
-A few notes on the above script. First, this implementation uses a 
+A few notes on the above script. First, this implementation uses a
 ``setup(change_state, animated_sprite, persistent_state)`` method to assign
-references. These references will be instantiated in the parent of this state. This helps with something 
-in programming known as *cohesion*. The state of the player does not want the responsibility of creating 
-these variables, but does want to be able to use them. However, this does make the state *coupled* to the 
-state's parent. This means that the state is highly reliant on whether it has a parent which contains 
+references. These references will be instantiated in the parent of this state. This helps with something
+in programming known as *cohesion*. The state of the player does not want the responsibility of creating
+these variables, but does want to be able to use them. However, this does make the state *coupled* to the
+state's parent. This means that the state is highly reliant on whether it has a parent which contains
 these variables. So, remember that *coupling* and *cohesion* are important concepts when it comes to code management.
 
-.. note:: 
+.. note::
     See the following page for more details on cohesion and coupling:
     https://courses.cs.washington.edu/courses/cse403/96sp/coupling-cohesion.html
 
@@ -137,7 +137,7 @@ So, now that there is a base state, the two states discussed earlier can be impl
             if abs(velocity) < min_move_speed:
                  change_state.call_func("idle")
             persistent_state.velocity.x *= friction
-    
+
         func move_left():
             if animated_sprite.flip_h:
                 persistent_state.velocity += move_speed
@@ -225,20 +225,20 @@ will not change it makes sense to call this new script ``persistent_state.gd``.
             state.name = "current_state"
             add_child(state)
 
-.. note:: 
-    The ``persistent_state.gd`` script contains code for detecting input. This was to make the tutorial simple, but it is not usually 
+.. note::
+    The ``persistent_state.gd`` script contains code for detecting input. This was to make the tutorial simple, but it is not usually
     best practice to do this.
 
 Project setup
 -------------
 
-This tutorial made an assumption that the node it would be attached to contained a child node which is an :ref:`AnimatedSprite <class_AnimatedSprite>`. 
+This tutorial made an assumption that the node it would be attached to contained a child node which is an :ref:`AnimatedSprite <class_AnimatedSprite>`.
 There is also the assumption that this :ref:`AnimatedSprite <class_AnimatedSprite>` has at least two animations,
 the idle and run animations. Also, the top-level node is assumed to be a :ref:`KinematicBody2D <class_KinematicBody2D>`.
 
 .. image:: img/llama_run.gif
 
-.. note:: 
+.. note::
     The zip file of the llama used in this tutorial is :download:`here <files/llama.zip>`.
     The source was from `piskel_llama <https://www.piskelapp.com/p/agxzfnBpc2tlbC1hcHByEwsSBlBpc2tlbBiAgICfx5ygCQw/edit>`_, but
     I couldn't find the original creator information on that page though...
@@ -254,4 +254,3 @@ player, which is a :ref:`KinematicBody2D <class_KinematicBody2D>`.
 Now the player has utilized the state design pattern to implement its two different states. The nice part of this
 pattern is that if one wanted to add another state, then it would involve creating another class that need only
 focus on itself and how it changes to another state. Each state is functionally separated and instantiated dynamically.
-

+ 15 - 15
tutorials/networking/websocket.rst

@@ -28,10 +28,10 @@ This example will show you how to create a WebSocket connection to a remote serv
 ::
 
     extends Node
-    
+
     # The URL we will connect to
     export var websocket_url = "ws://echo.websocket.org"
-    
+
     # Our WebSocketClient instance
     var _client = WebSocketClient.new()
 
@@ -44,19 +44,19 @@ This example will show you how to create a WebSocket connection to a remote serv
         # a full packet is received.
         # Alternatively, you could check get_peer(1).get_available_packets() in a loop.
         _client.connect("data_received", self, "_on_data")
-    
+
         # Initiate connection to the given URL.
         var err = _client.connect_to_url(websocket_url)
         if err != OK:
             print("Unable to connect")
             set_process(false)
-    
+
     func _closed(was_clean = false):
         # was_clean will tell you if the disconnection was correctly notified
         # by the remote peer before closing the socket.
         print("Closed, clean: ", was_clean)
         set_process(false)
-    
+
     func _connected(proto = ""):
         # This is called on connection, "proto" will be the selected WebSocket
         # sub-protocol (which is optional)
@@ -64,13 +64,13 @@ This example will show you how to create a WebSocket connection to a remote serv
         # You MUST always use get_peer(1).put_packet to send data to server,
         # and not put_packet directly when not using the MultiplayerAPI.
         _client.get_peer(1).put_packet("Test packet".to_utf8())
-    
+
     func _on_data():
         # Print the received packet, you MUST always use get_peer(1).get_packet
         # to receive data from server, and not get_packet directly when not
         # using the MultiplayerAPI.
         print("Got data from server: ", _client.get_peer(1).get_packet().get_string_from_utf8())
-    
+
     func _process(delta):
         # Call this in _process or _physics_process. Data transfer, and signals
         # emission will only happen when calling this function.
@@ -80,7 +80,7 @@ This will print:
 
 ::
 
-    Connected with protocol: 
+    Connected with protocol:
     Got data from server: Test packet
 
 Minimal server example
@@ -91,12 +91,12 @@ This example will show you how to create a WebSocket server that listen for remo
 ::
 
     extends Node
-    
+
     # The port we will listen to
     const PORT = 9080
     # Our WebSocketServer instance
     var _server = WebSocketServer.new()
-    
+
     func _ready():
         # Connect base signals to get notified of new client connections,
         # disconnections, and disconnect requests.
@@ -113,30 +113,30 @@ This example will show you how to create a WebSocket server that listen for remo
         if err != OK:
             print("Unable to start server")
             set_process(false)
-    
+
     func _connected(id, proto):
         # This is called when a new peer connects, "id" will be the assigned peer id,
         # "proto" will be the selected WebSocket sub-protocol (which is optional)
         print("Client %d connected with protocol: %s" % [id, proto])
-    
+
     func _close_request(id, code, reason):
         # This is called when a client notifies that it wishes to close the connection,
         # providing a reason string and close code.
         print("Client %d disconnecting with code: %d, reason: %s" % [id, code, reason])
-    
+
     func _disconnected(id, was_clean = false):
         # This is called when a client disconnects, "id" will be the one of the
         # disconnecting client, "was_clean" will tell you if the disconnection
         # was correctly notified by the remote peer before closing the socket.
         print("Client %d disconnected, clean: %s" % [id, str(was_clean)])
-    
+
     func _on_data(id):
         # Print the received packet, you MUST always use get_peer(id).get_packet to receive data,
         # and not get_packet directly when not using the MultiplayerAPI.
         var pkt = _server.get_peer(id).get_packet()
         print("Got data from client %d: %s ... echoing" % [id, pkt.get_string_from_utf8()])
         _server.get_peer(id).put_packet(pkt)
-    
+
     func _process(delta):
         # Call this in _process or _physics_process.
         # Data transfer, and signals emission will only happen when calling this function.

+ 5 - 5
tutorials/optimization/using_servers.rst

@@ -90,10 +90,10 @@ This is a simple example of how to create a sprite from code and move it using t
  .. code-tab:: gdscript GDScript
 
     extends Node2D
-    
+
     # VisualServer expects references to be kept around
     var sprite
-    
+
     func _ready():
         # Create a canvas item, child of this node.
         var ci_rid = VisualServer.canvas_item_create()
@@ -129,10 +129,10 @@ The 3D APIs are different from the 2D ones, so the instantiation API must be use
  .. code-tab:: gdscript GDScript
 
     extends Spatial
-    
+
     # VisualServer expects references to be kept around
     var mesh
-    
+
     func _ready():
         # Create a visual instance (for 3D).
         var instance = VisualServer.instance_create()
@@ -156,7 +156,7 @@ and moves a :ref:`CanvasItem <class_CanvasItem>` when the body moves.
 
 .. tabs::
  .. code-tab:: gdscript GDScript
-    
+
     # Physics2DServer expects references to be kept around
     var body
     var shape

+ 2 - 4
tutorials/physics/soft_body.rst

@@ -23,7 +23,7 @@ Set the parameters to obtain the type of soft body you aim for. Try to keep the
 
 .. note:: Handle some parameters with care, as some value can lead to strange results. For example, if the shape is not completely closed and you set pressure to more than 0, the softbody will fly around like a plastic bag under strong wind.
 
-Play the scene to view the simulation. 
+Play the scene to view the simulation.
 
 .. tip:: To improve the simulation's result, increase the ``Simulation Precision``, this will give significant improvement at the cost of performance.
 
@@ -64,6 +64,4 @@ Play the scene and the cloak should simulate correctly.
 
 .. image:: img/softbody_cloak_finish.png
 
-This covers the basic settings of softbody, experiment with the parameters to achieve the effect you are aiming for when making your game. 
-
-
+This covers the basic settings of softbody, experiment with the parameters to achieve the effect you are aiming for when making your game.

+ 1 - 1
tutorials/platform/consoles.rst

@@ -33,7 +33,7 @@ Following is the list of providers:
 
 * `Lone Wolf Technology <http://www.lonewolftechnology.com/>`_ offers
   Switch, PS4 and Xbox One porting and publishing of Godot games.
-* `Pineapple Works <https://pineapple.works/>`_ offers 
+* `Pineapple Works <https://pineapple.works/>`_ offers
   Switch and Xbox One porting and publishing of Godot games.
 
 If your company offers porting and/or publishing services for Godot games,

+ 58 - 58
tutorials/platform/customizing_html5_shell.rst

@@ -20,55 +20,55 @@ Some use-cases where customizing the default page is useful include:
 
 The default HTML page is available in the Godot Engine repository at
 `/misc/dist/html/full-size.html <https://github.com/godotengine/godot/blob/master/misc/dist/html/full-size.html>`__
-and can be used as a reference implementation. Another sample HTML page is available at 
+and can be used as a reference implementation. Another sample HTML page is available at
 `/misc/dist/html/fixed-size.html <https://github.com/godotengine/godot/blob/master/misc/dist/html/fixed-size.html>`__.
 It differs from the default one by having a fixed size canvas area and an output widget below it.
 
-.. note:: It is recommended to use developer tools provided by browser vendors to debug 
-          exported projects. Output generated by the engine may be limited and does not 
+.. note:: It is recommended to use developer tools provided by browser vendors to debug
+          exported projects. Output generated by the engine may be limited and does not
           include WebGL errors.
 
 Setup
 -----
-As evident by the default HTML page, it is mostly a regular HTML document. To work with 
-Godot projects it needs to be fully realized, to have a control code that calls 
-the :js:class:`Engine` class, and to provide places for several placeholders, which are 
+As evident by the default HTML page, it is mostly a regular HTML document. To work with
+Godot projects it needs to be fully realized, to have a control code that calls
+the :js:class:`Engine` class, and to provide places for several placeholders, which are
 replaced with their actual values during export.
 
 .. image:: img/html5_export_options.png
 
 - ``$GODOT_BASENAME``:
-  The base name from the *Export Path*, as set up in the export options; suffixes are omitted 
-  (e.g. ``game.html`` becomes ``game``). This variable can be used to generate a path 
-  to the main JavaScript file ``$GODOT_BASENAME.js``, which provides the :js:class:`Engine` 
-  class. A splash image shown during the booting process can be accessed using this variable 
+  The base name from the *Export Path*, as set up in the export options; suffixes are omitted
+  (e.g. ``game.html`` becomes ``game``). This variable can be used to generate a path
+  to the main JavaScript file ``$GODOT_BASENAME.js``, which provides the :js:class:`Engine`
+  class. A splash image shown during the booting process can be accessed using this variable
   as well: ``$GODOT_BASENAME.png``.
 
 - ``$GODOT_PROJECT_NAME``:
   The project name as defined in the Project Settings.
 
 - ``$GODOT_HEAD_INCLUDE``:
-  A custom string to include in the HTML document just before the end of the ``<head>`` tag. It 
-  is customized in the export options under the *Html / Head Include* section. While you fully 
-  control the HTML page you create, this variable can be useful for configuring parts of the 
+  A custom string to include in the HTML document just before the end of the ``<head>`` tag. It
+  is customized in the export options under the *Html / Head Include* section. While you fully
+  control the HTML page you create, this variable can be useful for configuring parts of the
   HTML ``head`` element from the Godot Editor, e.g. for different Web export presets.
 
 - ``$GODOT_DEBUG_ENABLED``:
   A flag that tells if this is a debug build, or not. This variable is substituted by strings
   ``true`` and ``false``, and can be used to disable debug branches within your control code.
 
-When the custom page is ready, it can be selected in the export options under the *Html / Custom Html Shell* 
+When the custom page is ready, it can be selected in the export options under the *Html / Custom Html Shell*
 section.
 
 Starting the project
 --------------------
-To be able to start the game, you need to write a script that initializes the engine — the control 
-code. This process consists of three steps, though some of them can be skipped and left for 
+To be able to start the game, you need to write a script that initializes the engine — the control
+code. This process consists of three steps, though some of them can be skipped and left for
 a default behavior.
 
-First, the engine must be loaded, then it needs to be initialized, and after this the project 
-can finally be started. You can perform every of these steps manually and with great control. 
-However, in the simplest case all you need to do is to create an instance of the :js:class:`Engine` 
+First, the engine must be loaded, then it needs to be initialized, and after this the project
+can finally be started. You can perform every of these steps manually and with great control.
+However, in the simplest case all you need to do is to create an instance of the :js:class:`Engine`
 class and then call the :js:meth:`engine.startGame` method.
 
 .. code-block:: js
@@ -79,29 +79,29 @@ class and then call the :js:meth:`engine.startGame` method.
     const engine = new Engine();
     engine.startGame(execName, mainPack)
 
-This snippet of code automatically loads and initializes the engine before starting the game. 
-It uses the given path to the executable to deduce the path to load the engine. The :js:meth:`engine.startGame` 
-method is asynchronous and returns a ``Promise``. This allows your control code to track if 
+This snippet of code automatically loads and initializes the engine before starting the game.
+It uses the given path to the executable to deduce the path to load the engine. The :js:meth:`engine.startGame`
+method is asynchronous and returns a ``Promise``. This allows your control code to track if
 the game was loaded correctly without blocking execution or relying on polling.
 
-In case your project needs to have special arguments passed to it by the start-up script, 
-:js:meth:`engine.startGame` can be replaced by :js:meth:`engine.start`. This method takes an 
-arbitrary list of string arguments. As it does not have a defined list of arguments, :js:meth:`engine.start` 
+In case your project needs to have special arguments passed to it by the start-up script,
+:js:meth:`engine.startGame` can be replaced by :js:meth:`engine.start`. This method takes an
+arbitrary list of string arguments. As it does not have a defined list of arguments, :js:meth:`engine.start`
 cannot automatically load the engine.
 
-To load the engine manually the :js:meth:`Engine.load` static method must be called. As 
-this method is static, multiple engine instances can be spawned with the exact same ``basePath``. 
+To load the engine manually the :js:meth:`Engine.load` static method must be called. As
+this method is static, multiple engine instances can be spawned with the exact same ``basePath``.
 If an instance requires a different ``basePath``, you can call the :js:meth:`engine.init`
 method with that path before starting the game.
 
 .. note:: Multiple instances cannot be spawned by default, as the engine is immediately unloaded after it is initialized.
-          To prevent this from happening the :js:meth:`engine.setUnloadAfterInit` method can be called. It is still possible 
-          to unload the engine manually afterwards by calling the :js:meth:`Engine.unload` static method. Unloading the engine 
+          To prevent this from happening the :js:meth:`engine.setUnloadAfterInit` method can be called. It is still possible
+          to unload the engine manually afterwards by calling the :js:meth:`Engine.unload` static method. Unloading the engine
           frees browser memory by unloading files that are no longer needed once the instance is initialized.
 
-To correctly load the engine on some hosting providers and network configurations you may 
-need to change the default filename extension by using :js:meth:`Engine.setWebAssemblyFilenameExtension`. 
-By default, the extension is assumed to be ``wasm``. If your hosting provider blocks this 
+To correctly load the engine on some hosting providers and network configurations you may
+need to change the default filename extension by using :js:meth:`Engine.setWebAssemblyFilenameExtension`.
+By default, the extension is assumed to be ``wasm``. If your hosting provider blocks this
 extension, this static method can be used to change it to something that is supported.
 
 .. code-block:: js
@@ -110,35 +110,35 @@ extension, this static method can be used to change it to something that is supp
     // Load mygame.dat as WebAssembly module.
     Engine.load("mygame");
 
-.. warning:: If a different filename extension is used, some web servers may automatically 
-             set the MIME-type of the file to something other than :mimetype:`application/wasm`. 
+.. warning:: If a different filename extension is used, some web servers may automatically
+             set the MIME-type of the file to something other than :mimetype:`application/wasm`.
              In that case some start-up optimizations may be skipped.
 
 Customizing the behavior
 ------------------------
 In the Web environment several methods can be used to guarantee that the game will work as intended.
 
-If you target a specific version of WebGL, or just want to check if WebGL is available at all, 
-you can call the :js:meth:`Engine.isWebGLAvailable` method. It optionally takes an argument that 
+If you target a specific version of WebGL, or just want to check if WebGL is available at all,
+you can call the :js:meth:`Engine.isWebGLAvailable` method. It optionally takes an argument that
 allows to test for a specific major version of WebGL.
 
-As the real executable file does not exist in the Web environment, the engine only stores a virtual 
-filename formed from the base name of loaded engine files. This value affects the output of the 
-:ref:`OS.get_executable_path() <class_OS_method_get_executable_path>` method and defines the name of 
+As the real executable file does not exist in the Web environment, the engine only stores a virtual
+filename formed from the base name of loaded engine files. This value affects the output of the
+:ref:`OS.get_executable_path() <class_OS_method_get_executable_path>` method and defines the name of
 the automatically started main pack. The :js:meth:`engine.setExecutableName` method can be used
 to override this value.
 
 If your project requires some files to be available the moment it is loaded, you can preload
-them by calling the :js:meth:`engine.preloadFile` method with a path to a file or by providing it 
-with an ``ArrayBuffer`` object. In case of the ``ArrayBuffer``, or one of its views, a second argument 
+them by calling the :js:meth:`engine.preloadFile` method with a path to a file or by providing it
+with an ``ArrayBuffer`` object. In case of the ``ArrayBuffer``, or one of its views, a second argument
 must be specified to define an internal path for the loaded resource.
 
 Customizing the presentation
 ----------------------------
 Several methods can be used to further customize the look and behavior of the game on your page.
 
-By default, the first canvas element on the page is used for rendering. To use a different canvas 
-element the :js:meth:`engine.setCanvas` method can be used. It requires a reference to the DOM 
+By default, the first canvas element on the page is used for rendering. To use a different canvas
+element the :js:meth:`engine.setCanvas` method can be used. It requires a reference to the DOM
 element itself.
 
 .. code-block:: js
@@ -146,12 +146,12 @@ element itself.
     const canvasElement = document.querySelector("#my-canvas-element");
     engine.setCanvas(canvasElement);
 
-If the width and height of this canvas element differ from values set in the project settings, it 
-will be resized on the project start. This behavior can be disabled by calling the :js:meth:`engine.setCanvasResizedOnStart` 
+If the width and height of this canvas element differ from values set in the project settings, it
+will be resized on the project start. This behavior can be disabled by calling the :js:meth:`engine.setCanvasResizedOnStart`
 method.
 
 If your game takes some time to load, it may be useful to display a custom loading UI which tracks
-the progress. This can be achieved with the :js:meth:`engine.setProgressFunc` method which allows 
+the progress. This can be achieved with the :js:meth:`engine.setProgressFunc` method which allows
 to set up a callback function to be called regularly as the engine loads new bytes.
 
 .. code-block:: js
@@ -163,19 +163,19 @@ to set up a callback function to be called regularly as the engine loads new byt
 
 Be aware that in some cases ``total`` can be ``0``. This means that it cannot be calculated.
 
-If your game supports multiple languages, the :js:meth:`engine.setLocale` method can be used to set 
-a specific locale, provided you have a valid language code string. It may be good to use server-side 
-logic to determine which languages a user may prefer. This way the language code can be taken from the 
+If your game supports multiple languages, the :js:meth:`engine.setLocale` method can be used to set
+a specific locale, provided you have a valid language code string. It may be good to use server-side
+logic to determine which languages a user may prefer. This way the language code can be taken from the
 ``Accept-Language`` HTTP header, or determined by a GeoIP service.
 
 Debugging
 ---------
-To debug exported projects, it may be useful to read the standard output and error streams generated 
-by the engine. This is similar to the output shown in the editor console window. By default, standard 
-``console.log`` and ``console.warn`` are used for the output and error streams respectively. This 
+To debug exported projects, it may be useful to read the standard output and error streams generated
+by the engine. This is similar to the output shown in the editor console window. By default, standard
+``console.log`` and ``console.warn`` are used for the output and error streams respectively. This
 behavior can be customized by setting your own functions to handle messages.
 
-Use the :js:meth:`engine.setStdoutFunc` method to set a callback function for the output stream. Default 
+Use the :js:meth:`engine.setStdoutFunc` method to set a callback function for the output stream. Default
 behavior is similar to this:
 
 .. code-block:: js
@@ -185,7 +185,7 @@ behavior is similar to this:
     }
     engine.setStdoutFunc(printStdout);
 
-Use the :js:meth:`engine.setStderrFunc` method to set a callback function for the error stream. Default 
+Use the :js:meth:`engine.setStderrFunc` method to set a callback function for the error stream. Default
 behavior is similar to this:
 
 .. code-block:: js
@@ -195,10 +195,10 @@ behavior is similar to this:
     }
     engine.setStderrFunc(printStderr);
 
-When handling the engine output keep in mind, that it may not be desirable to print it out in the 
-finished product. To control whether or not the current execution is actually a debug build you can 
+When handling the engine output keep in mind, that it may not be desirable to print it out in the
+finished product. To control whether or not the current execution is actually a debug build you can
 use ``$GODOT_DEBUG_ENABLED`` placeholder.
 
-Further debugging options and a low level access to the execution environment are available in a form 
-of Emscripten's ``Module`` object. It can be accessed using the :js:attr:`engine.rtenv` property on the 
+Further debugging options and a low level access to the execution environment are available in a form
+of Emscripten's ``Module`` object. It can be accessed using the :js:attr:`engine.rtenv` property on the
 engine instance.

+ 15 - 15
tutorials/platform/html5_shell_classref.rst

@@ -3,17 +3,17 @@
 HTML5 shell class reference
 ===========================
 
-Projects exported for the Web expose the ``Engine`` class to the JavaScript environment, that allows 
+Projects exported for the Web expose the ``Engine`` class to the JavaScript environment, that allows
 fine control over the engine's start-up process.
 
-This API is built in an asynchronous manner and requires basic understanding 
+This API is built in an asynchronous manner and requires basic understanding
 of `Promises <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises>`__.
 
 Engine
 ------
 
 The ``Engine`` class provides methods for loading and starting exported projects on the Web. For default export
-settings, this is already part of the exported HTML page. To understand practical use of the ``Engine`` class, 
+settings, this is already part of the exported HTML page. To understand practical use of the ``Engine`` class,
 see :ref:`Custom HTML page for Web export <doc_customizing_html5_shell>`.
 
 Static Methods
@@ -99,7 +99,7 @@ Static Method Descriptions
 
 .. js:method:: Engine.setWebAssemblyFilenameExtension(extension)
 
-    Set an alternative filename extension for the WebAssembly module. By default 
+    Set an alternative filename extension for the WebAssembly module. By default
     it is assumed to be ``wasm``.
 
     :param string extension:
@@ -113,7 +113,7 @@ Instance Property Descriptions
 
     The runtime environment provided by Emscripten's ``Module``. For more information
     refer to the `official documentation <https://emscripten.org/docs/api_reference/module.html>`__ on Emscripten.
-    
+
 Instance Method Descriptions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -154,10 +154,10 @@ Instance Method Descriptions
 .. js:method:: engine.start([arg1, arg2, …])
 
     Start the instance of the engine, using the passed strings as
-    command line arguments. :js:meth:`engine.startGame` can be used 
+    command line arguments. :js:meth:`engine.startGame` can be used
     in typical cases instead.
-    
-    This will initialize the instance if it is not initialized. For manual 
+
+    This will initialize the instance if it is not initialized. For manual
     initialization, see :js:meth:`engine.init`. The engine must be loaded beforehand.
 
     Fails if a canvas cannot be found on the page.
@@ -172,10 +172,10 @@ Instance Method Descriptions
 
     Start the game instance using the given executable URL and main pack URL.
 
-    This will initialize the instance if it is not initialized. For manual 
+    This will initialize the instance if it is not initialized. For manual
     initialization, see :js:meth:`engine.init`.
 
-    This will load the engine if it is not loaded. The base path of the 
+    This will load the engine if it is not loaded. The base path of the
     executable URL will be used as the engine base path.
 
     :param string execName:
@@ -215,7 +215,7 @@ Instance Method Descriptions
 .. js:method:: engine.setLocale(locale)
 
     Specify a language code to select the proper localization for the game.
-    
+
     .. seealso:: Complete list of :ref:`supported locales <doc_locales>`.
 
     :param string locale:
@@ -223,7 +223,7 @@ Instance Method Descriptions
 
 .. js:method:: engine.setExecutableName(execName)
 
-    Specify the virtual filename of the executable. By default, the base name 
+    Specify the virtual filename of the executable. By default, the base name
     of the loaded engine files is used.
 
     This affects the output of :ref:`OS.get_executable_path() <class_OS_method_get_executable_path>`
@@ -246,12 +246,12 @@ Instance Method Descriptions
         multi-threading)
 
     :param function callback:
-        The callback function must accept two numeric arguments: the amount of bytes 
+        The callback function must accept two numeric arguments: the amount of bytes
         loaded so far, and the total number of bytes to load.
 
 .. js:method:: engine.setStdoutFunc(callback)
 
-    Specify a callback function for handling the standard output stream. This method 
+    Specify a callback function for handling the standard output stream. This method
     should usually only be used in debug pages. By default, ``console.log()`` is used.
 
     :param function callback:
@@ -259,7 +259,7 @@ Instance Method Descriptions
 
 .. js:method:: engine.setStderrFunc(callback)
 
-    Specify a callback function for handling the standard error stream. This method 
+    Specify a callback function for handling the standard error stream. This method
     should usually only be used in debug pages. By default, ``console.warn()`` is used.
 
     :param function callback:

+ 13 - 13
tutorials/plugins/android/android_plugin.rst

@@ -6,10 +6,10 @@ Creating Android plugins (Godot 4.0+)
 Introduction
 ------------
 
-Android plugins are powerful tools to extend the capabilities of the Godot engine 
-by tapping into the functionality provided by the Android platform and ecosystem. 
+Android plugins are powerful tools to extend the capabilities of the Godot engine
+by tapping into the functionality provided by the Android platform and ecosystem.
 
-Mobile gaming monetization is one such example since it requires features 
+Mobile gaming monetization is one such example since it requires features
 and capabilities that don't belong to the core feature set of a game engine:
 
 -  Analytics
@@ -40,7 +40,7 @@ the default, rendering plugins for Godot 3.2.0 incompatible with Godot 4.0.
 
 As a prerequisite, make sure you understand how to set up a :ref:`custom build environment<doc_android_custom_build>` for Android.
 
-At its core, a Godot Android plugin is a `Android archive library <https://developer.android.com/studio/projects/android-library#aar-contents>`_ (*aar* archive file) 
+At its core, a Godot Android plugin is a `Android archive library <https://developer.android.com/studio/projects/android-library#aar-contents>`_ (*aar* archive file)
 with the following caveats:
 
 -  The library must have a dependency on the Godot engine library (``godot-lib.x.y.aar``). A stable version is made available for each Godot release.
@@ -50,7 +50,7 @@ with the following caveats:
 Building a Android plugin
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 
-**Prerequisite:** `Android Studio <https://developer.android.com/studio>`_ is strongly recommended as the IDE to use to create Android plugins. 
+**Prerequisite:** `Android Studio <https://developer.android.com/studio>`_ is strongly recommended as the IDE to use to create Android plugins.
 The instructions below assumes that you're using Android Studio.
 
 1.  Follow `these instructions <https://developer.android.com/studio/projects/android-library>`__ to create an Android library module for your plugin.
@@ -74,17 +74,17 @@ The instructions below assumes that you're using Android Studio.
     -   Add the ``<application></application>`` tag if it's missing.
 
     -   In the ``<application>`` tag, add a ``<meta-data>`` tag setup as follow::
-        
-            <meta-data 
-                android:name="org.godotengine.plugin.v1.[PluginName]" 
+
+            <meta-data
+                android:name="org.godotengine.plugin.v1.[PluginName]"
                 android:value="[plugin.init.ClassFullName]" />
 
         Where ``PluginName`` is the name of the plugin, and ``plugin.init.ClassFullName`` is the full name (package + class name) of the plugin loading class.
 
-5.  Add the remaining logic for your plugin and run the ``gradlew build`` command to generate the plugin's ``aar`` file. 
+5.  Add the remaining logic for your plugin and run the ``gradlew build`` command to generate the plugin's ``aar`` file.
     The build will likely generate both a ``debug`` and ``release`` ``aar`` files. Depending on your need, pick only one version (usually the ``release`` one) which to provide your users with.
 
-**Note:** The plugin's ``aar`` filename must match the following pattern: ``[PluginName]*.aar`` 
+**Note:** The plugin's ``aar`` filename must match the following pattern: ``[PluginName]*.aar``
 where ``PluginName`` is the name of the plugin in camel case (e.g: ``GodotPayment.release.aar``).
 
 Loading and using a Android plugin
@@ -106,12 +106,12 @@ Bundling GDNative resources
 A Android plugin can define and provide C/C++ GDNative resources, either to provide and/or access functionality from the game logic.
 The GDNative resources can be bundled within the plugin ``aar`` file which simplifies the distribution and deployment process:
 
-    -   The shared libraries (``.so``) for the defined GDNative libraries will be automatically bundled by the ``aar`` build system. 
+    -   The shared libraries (``.so``) for the defined GDNative libraries will be automatically bundled by the ``aar`` build system.
 
-    -   Godot ``*.gdnlib`` and ``*.gdns`` resource files must be manually defined in the plugin ``assets`` directory. 
+    -   Godot ``*.gdnlib`` and ``*.gdns`` resource files must be manually defined in the plugin ``assets`` directory.
         The recommended path for these resources relative to the ``assets`` directory should be: ``godot/plugin/v1/[PluginName]/``.
 
-For GDNative libraries, the plugin singleton object must override the ``org.godotengine.godot.plugin.GodotPlugin::getPluginGDNativeLibrariesPaths()`` method, 
+For GDNative libraries, the plugin singleton object must override the ``org.godotengine.godot.plugin.GodotPlugin::getPluginGDNativeLibrariesPaths()`` method,
 and return the paths to the bundled GDNative libraries config files (``*.gdnlib``). The paths must be relative to the ``assets`` directory.
 At runtime, the plugin will provide these paths to Godot core which will use them to load and initialize the bundled GDNative libraries.
 

+ 12 - 12
tutorials/plugins/editor/making_plugins.rst

@@ -28,7 +28,7 @@ The first thing you need for the editor to identify a new plugin is to
 create two files: a ``plugin.cfg`` for configuration and a tool script with the
 functionality. Plugins have a standard path like ``addons/plugin_name`` inside
 the project folder. Godot provides a dialog for generating those files and
-placing them where they need to be. 
+placing them where they need to be.
 
 In the main toolbar, click the ``Project`` dropdown. Then click
 ``Project Settings...``. Go to the ``Plugins`` tab and then click
@@ -87,7 +87,7 @@ like this:
 .. _doc_making_plugins_template_code:
 .. tabs::
  .. code-tab:: gdscript GDScript
- 
+
     tool
     extends EditorPlugin
 
@@ -100,7 +100,7 @@ like this:
         pass
 
  .. code-tab:: csharp
- 
+
     #if TOOLS
     using Godot;
     using System;
@@ -152,7 +152,7 @@ clicked. For that, we'll need a simple script that extends from
 
 .. tabs::
  .. code-tab:: gdscript GDScript
- 
+
     tool
     extends Button
 
@@ -163,7 +163,7 @@ clicked. For that, we'll need a simple script that extends from
         print("You clicked me!")
 
  .. code-tab:: csharp
- 
+
     using Godot;
     using System;
 
@@ -194,7 +194,7 @@ dialog. For that, change the ``custom_node.gd`` script to the following:
 
 .. tabs::
  .. code-tab:: gdscript GDScript
- 
+
     tool
     extends EditorPlugin
 
@@ -209,7 +209,7 @@ dialog. For that, change the ``custom_node.gd`` script to the following:
         remove_custom_type("MyButton")
 
  .. code-tab:: csharp
- 
+
     #if TOOLS
     using Godot;
     using System;
@@ -261,7 +261,7 @@ add the following content to it:
 
 .. tabs::
  .. code-tab:: gdscript GDScript
- 
+
     [plugin]
 
     name="My Custom Dock"
@@ -271,7 +271,7 @@ add the following content to it:
     script="custom_dock.gd"
 
  .. code-tab:: csharp
- 
+
     [plugin]
 
     name="My Custom Dock"
@@ -308,7 +308,7 @@ The script could look like this:
 
 .. tabs::
  .. code-tab:: gdscript GDScript
- 
+
     tool
     extends EditorPlugin
 
@@ -332,7 +332,7 @@ The script could look like this:
         dock.free()
 
  .. code-tab:: csharp
- 
+
     #if TOOLS
     using Godot;
     using System;
@@ -341,7 +341,7 @@ The script could look like this:
     public class CustomDock : EditorPlugin
     {
         Control dock;
-    
+
         public override void _EnterTree()
         {
             dock = (Control)GD.Load<PackedScene>("addons/my_custom_dock/my_dock.tscn").Instance();

+ 26 - 26
tutorials/plugins/editor/spatial_gizmos.rst

@@ -9,9 +9,9 @@ Introduction
 Spatial gizmo plugins are used by the editor and custom plugins to define the
 gizmos attached to any kind of Spatial node.
 
-This tutorial will show you the two main approaches to defining your own custom 
-gizmos. The first option works well for simple gizmos and creates less clutter in 
-your plugin structure, while the second one will let you store some per-gizmo data. 
+This tutorial will show you the two main approaches to defining your own custom
+gizmos. The first option works well for simple gizmos and creates less clutter in
+your plugin structure, while the second one will let you store some per-gizmo data.
 
 .. note:: This tutorial assumes you already know how to make generic plugins. If
           in doubt, refer to the :ref:`doc_making_plugins` page.
@@ -19,17 +19,17 @@ your plugin structure, while the second one will let you store some per-gizmo da
 The EditorSpatialGizmoPlugin
 ----------------------------
 
-Regardless of the approach we choose, we will need to create a new 
+Regardless of the approach we choose, we will need to create a new
 :ref:`EditorSpatialGizmoPlugin <class_EditorSpatialGizmoPlugin>`. This will allow
-us to set a name for the new gizmo type and define other behaviors such as whether 
+us to set a name for the new gizmo type and define other behaviors such as whether
 the gizmo can be hidden or not.
 
 This would be a basic setup:
 
 ::
-    
-    # MyCustomGizmoPlugin.gd 
-    
+
+    # MyCustomGizmoPlugin.gd
+
     extends EditorSpatialGizmoPlugin
 
     func get_name():
@@ -54,8 +54,8 @@ This would be a basic setup:
         remove_spatial_gizmo_plugin(gizmo_plugin)
 
 
-For simple gizmos, just inheriting :ref:`EditorSpatialGizmoPlugin <class_EditorSpatialGizmoPlugin>` 
-is enough. If you want to store some per-gizmo data or you are porting a Godot 3.0 gizmo 
+For simple gizmos, just inheriting :ref:`EditorSpatialGizmoPlugin <class_EditorSpatialGizmoPlugin>`
+is enough. If you want to store some per-gizmo data or you are porting a Godot 3.0 gizmo
 to 3.1+, you should go with the second approach.
 
 
@@ -66,7 +66,7 @@ The first step is to, in our custom gizmo plugin, override the :ref:`has_gizmo()
 method so that it returns ``true`` when the spatial parameter is of our target type.
 
 ::
-    
+
     # ...
 
     func has_gizmo(spatial):
@@ -77,7 +77,7 @@ Then we can override methods like :ref:`redraw()<class_EditorSpatialGizmoPlugin_
 or all the handle related ones.
 
 ::
-    
+
     # ...
 
     func _init():
@@ -88,9 +88,9 @@ or all the handle related ones.
         gizmo.clear()
 
         var spatial = gizmo.get_spatial_node()
-        
+
         var lines = PackedVector3Array()
-        
+
         lines.push_back(Vector3(0, 1, 0))
         lines.push_back(Vector3(0, spatial.my_custom_value, 0))
 
@@ -98,15 +98,15 @@ or all the handle related ones.
 
         handles.push_back(Vector3(0, 1, 0))
         handles.push_back(Vector3(0, spatial.my_custom_value, 0))
-        
+
         gizmo.add_lines(lines, get_material("main", gizmo), false)
         gizmo.add_handles(handles, get_material("handles", gizmo))
 
     # ...
 
 Note that we created a material in the `_init` method, and retrieved it in the `redraw`
-method using :ref:`get_material()<class_EditorSpatialGizmoPlugin_method_get_material>`. This 
-method retrieves one of the material's variants depending on the state of the gizmo 
+method using :ref:`get_material()<class_EditorSpatialGizmoPlugin_method_get_material>`. This
+method retrieves one of the material's variants depending on the state of the gizmo
 (selected and/or editable).
 
 So the final plugin would look somewhat like this:
@@ -128,9 +128,9 @@ So the final plugin would look somewhat like this:
         gizmo.clear()
 
         var spatial = gizmo.get_spatial_node()
-        
+
         var lines = PackedVector3Array()
-        
+
         lines.push_back(Vector3(0, 1, 0))
         lines.push_back(Vector3(0, spatial.my_custom_value, 0))
 
@@ -138,7 +138,7 @@ So the final plugin would look somewhat like this:
 
         handles.push_back(Vector3(0, 1, 0))
         handles.push_back(Vector3(0, spatial.my_custom_value, 0))
-        
+
         gizmo.add_lines(lines, get_material("main", gizmo), false)
         gizmo.add_handles(handles, get_material("handles", gizmo))
 
@@ -153,16 +153,16 @@ Alternative approach
 --------------------
 
 In some cases we want to provide our own implementation of :ref:`EditorSpatialGizmo<class_EditorSpatialGizmo>`,
-maybe because we want to have some state stored in each gizmo or because we are porting 
+maybe because we want to have some state stored in each gizmo or because we are porting
 an old gizmo plugin and we don't want to go through the rewriting process.
 
-In these cases all we need to do is, in our new gizmo plugin, override 
+In these cases all we need to do is, in our new gizmo plugin, override
 :ref:`create_gizmo()<class_EditorSpatialGizmoPlugin_method_create_gizmo>`, so it returns our custom gizmo implementation
 for the Spatial nodes we want to target.
 
 ::
 
-    # MyCustomGizmoPlugin.gd 
+    # MyCustomGizmoPlugin.gd
     extends EditorSpatialGizmoPlugin
 
     const MyCustomSpatial = preload("res://addons/my-addon/MyCustomSpatial.gd")
@@ -187,16 +187,16 @@ This way all the gizmo logic and drawing methods can be implemented in a new cla
 
     extends EditorSpatialGizmo
 
-    # You can store data in the gizmo itself (more useful when working with handles)  
+    # You can store data in the gizmo itself (more useful when working with handles)
     var gizmo_size = 3.0
 
     func redraw():
         clear()
 
         var spatial = get_spatial_node()
-        
+
         var lines = PackedVector3Array()
-        
+
         lines.push_back(Vector3(0, 1, 0))
         lines.push_back(Vector3(gizmo_size, spatial.my_custom_value, 0))
 

+ 8 - 8
tutorials/plugins/editor/visual_shader_plugins.rst

@@ -118,11 +118,11 @@ all you need to initialize your plugin.
                 vec4 iy = vec4(Pi0.yy, Pi1.yy);
                 vec4 iz0 = vec4(Pi0.z);
                 vec4 iz1 = vec4(Pi1.z);
-                
+
                 vec4 ixy = permute(permute(ix) + iy);
                 vec4 ixy0 = permute(ixy + iz0);
                 vec4 ixy1 = permute(ixy + iz1);
-                
+
                 vec4 gx0 = ixy0 * (1.0 / 7.0);
                 vec4 gy0 = fract(floor(gx0) * (1.0 / 7.0)) - 0.5;
                 gx0 = fract(gx0);
@@ -130,7 +130,7 @@ all you need to initialize your plugin.
                 vec4 sz0 = step(gz0, vec4(0.0));
                 gx0 -= sz0 * (step(0.0, gx0) - 0.5);
                 gy0 -= sz0 * (step(0.0, gy0) - 0.5);
-                
+
                 vec4 gx1 = ixy1 * (1.0 / 7.0);
                 vec4 gy1 = fract(floor(gx1) * (1.0 / 7.0)) - 0.5;
                 gx1 = fract(gx1);
@@ -138,7 +138,7 @@ all you need to initialize your plugin.
                 vec4 sz1 = step(gz1, vec4(0.0));
                 gx1 -= sz1 * (step(0.0, gx1) - 0.5);
                 gy1 -= sz1 * (step(0.0, gy1) - 0.5);
-                
+
                 vec3 g000 = vec3(gx0.x, gy0.x, gz0.x);
                 vec3 g100 = vec3(gx0.y, gy0.y, gz0.y);
                 vec3 g010 = vec3(gx0.z, gy0.z, gz0.z);
@@ -147,7 +147,7 @@ all you need to initialize your plugin.
                 vec3 g101 = vec3(gx1.y, gy1.y, gz1.y);
                 vec3 g011 = vec3(gx1.z, gy1.z, gz1.z);
                 vec3 g111 = vec3(gx1.w, gy1.w, gz1.w);
-                
+
                 vec4 norm0 = taylorInvSqrt(vec4(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110)));
                 g000 *= norm0.x;
                 g010 *= norm0.y;
@@ -158,7 +158,7 @@ all you need to initialize your plugin.
                 g011 *= norm1.y;
                 g101 *= norm1.z;
                 g111 *= norm1.w;
-                
+
                 float n000 = dot(g000, Pf0);
                 float n100 = dot(g100, vec3(Pf1.x, Pf0.yz));
                 float n010 = dot(g010, vec3(Pf0.x, Pf1.y, Pf0.z));
@@ -167,11 +167,11 @@ all you need to initialize your plugin.
                 float n101 = dot(g101, vec3(Pf1.x, Pf0.y, Pf1.z));
                 float n011 = dot(g011, vec3(Pf0.x, Pf1.yz));
                 float n111 = dot(g111, Pf1);
-                
+
                 vec3 fade_xyz = fade(Pf0);
                 vec4 n_z = mix(vec4(n000, n100, n010, n110), vec4(n001, n101, n011, n111), fade_xyz.z);
                 vec2 n_yz = mix(n_z.xy, n_z.zw, fade_xyz.y);
-                float n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); 
+                float n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x);
                 return 2.2 * n_xyz;
             }
         """

+ 1 - 1
tutorials/plugins/gdnative/gdnative-cpp-example.rst

@@ -132,7 +132,7 @@ To generate and compile the bindings, use this command (replacing ``<platform>``
 with ``windows``, ``linux`` or ``osx`` depending on your OS):
 
 To speed up compilation, add `-jN` at the end of the SCons command line where `N` is the number of CPU threads you have on your system. The example below uses 4 threads.
- 
+
 .. code-block:: none
 
     cd godot-cpp

+ 51 - 51
tutorials/shading/advanced_postprocessing.rst

@@ -6,45 +6,45 @@ Advanced post-processing
 Introduction
 ------------
 
-This tutorial describes an advanced method for post-processing in Godot. 
-In particular, it will explain how to write a post-processing shader that 
-uses the depth buffer. You should already be familiar with post-processing 
+This tutorial describes an advanced method for post-processing in Godot.
+In particular, it will explain how to write a post-processing shader that
+uses the depth buffer. You should already be familiar with post-processing
 generally and, in particular, with the methods outlined in the :ref:`custom post-processing tutorial <doc_custom_postprocessing>`.
 
-In the previous post-processing tutorial, we rendered the scene to a :ref:`Viewport <class_Viewport>` 
-and then rendered the Viewport in a :ref:`ViewportContainer <class_ViewportContainer>` 
-to the main scene. One limitation of this method is that we could not access the 
-depth buffer because the depth buffer is only available in spatial shaders and 
+In the previous post-processing tutorial, we rendered the scene to a :ref:`Viewport <class_Viewport>`
+and then rendered the Viewport in a :ref:`ViewportContainer <class_ViewportContainer>`
+to the main scene. One limitation of this method is that we could not access the
+depth buffer because the depth buffer is only available in spatial shaders and
 Viewports do not maintain depth information.
 
 Full screen quad
 ----------------
 
-In the :ref:`custom post-processing tutorial <doc_custom_postprocessing>`, we 
-covered how to use a Viewport to make custom post-processing effects. There are 
+In the :ref:`custom post-processing tutorial <doc_custom_postprocessing>`, we
+covered how to use a Viewport to make custom post-processing effects. There are
 two main drawbacks of using a Viewport:
 
 1. The depth buffer cannot be accessed
 2. The effect of the post-processing shader is not visible in the editor
 
-To get around the limitation on using the depth buffer, use a :ref:`MeshInstance <class_MeshInstance>` 
-with a :ref:`QuadMesh <class_QuadMesh>` primitive. This allows us to use a spatial 
-shader and to access the depth texture of the scene. Next, use a vertex shader 
-to make the quad cover the screen at all times so that the post-processing 
+To get around the limitation on using the depth buffer, use a :ref:`MeshInstance <class_MeshInstance>`
+with a :ref:`QuadMesh <class_QuadMesh>` primitive. This allows us to use a spatial
+shader and to access the depth texture of the scene. Next, use a vertex shader
+to make the quad cover the screen at all times so that the post-processing
 effect will be applied at all times, including in the editor.
 
-First, create a new MeshInstance and set its mesh to a QuadMesh. This creates a quad 
+First, create a new MeshInstance and set its mesh to a QuadMesh. This creates a quad
 centered at position ``(0, 0, 0)`` with a width and height of ``1``. Set the width
-and height to ``2``. Right now, the quad occupies a position in world space at the 
-origin; however, we want it to move with the camera so that it always covers the 
-entire screen. To do this, we will bypass the coordinate transforms that translate 
-the vertex positions through the difference coordinate spaces and treat the vertices 
-as if they were already in clip space. 
-
-The vertex shader expects coordinates to be output in clip space, which are coordinates 
-ranging from ``-1`` at the left and bottom of the screen to ``1`` at the top and right 
-of the screen. This is why the QuadMesh needs to have height and width of ``2``. 
-Godot handles the transform from model to view space to clip space behind the scenes, 
+and height to ``2``. Right now, the quad occupies a position in world space at the
+origin; however, we want it to move with the camera so that it always covers the
+entire screen. To do this, we will bypass the coordinate transforms that translate
+the vertex positions through the difference coordinate spaces and treat the vertices
+as if they were already in clip space.
+
+The vertex shader expects coordinates to be output in clip space, which are coordinates
+ranging from ``-1`` at the left and bottom of the screen to ``1`` at the top and right
+of the screen. This is why the QuadMesh needs to have height and width of ``2``.
+Godot handles the transform from model to view space to clip space behind the scenes,
 so we need to nullify the effects of Godot's transformations. We do this by setting the
 ``POSITION`` built-in to our desired position. ``POSITION`` bypasses the built-in transformations
 and sets the vertex position directly.
@@ -57,12 +57,12 @@ and sets the vertex position directly.
     POSITION = vec4(VERTEX, 1.0);
   }
 
-Even with this vertex shader, the quad keeps disappearing. This is due to frustum 
+Even with this vertex shader, the quad keeps disappearing. This is due to frustum
 culling, which is done on the CPU. Frustum culling uses the camera matrix and the
 AABBs of Meshes to determine if the Mesh will be visible *before* passing it to the GPU.
-The CPU has no knowledge of what we are doing with the vertices, so it assumes the 
+The CPU has no knowledge of what we are doing with the vertices, so it assumes the
 coordinates specified refer to world positions, not clip space positions, which results
-in Godot culling the quad when we turn away from the center of the scene. In 
+in Godot culling the quad when we turn away from the center of the scene. In
 order to keep the quad from being culled, there are a few options:
 
 1. Add the QuadMesh as a child to the camera, so the camera is always pointed at it
@@ -83,22 +83,22 @@ the uniform variable ``DEPTH_TEXTURE``.
   float depth = texture(DEPTH_TEXTURE, SCREEN_UV).x;
 
 .. note:: Similar to accessing the screen texture, accessing the depth texture is only
-          possible when reading from the current viewport. The depth texture cannot be 
+          possible when reading from the current viewport. The depth texture cannot be
           accessed from another viewport to which you have rendered.
 
-The values returned by ``DEPTH_TEXTURE`` are between ``0`` and ``1`` and are nonlinear. 
-When displaying depth directly from the ``DEPTH_TEXTURE``, everything will look almost 
+The values returned by ``DEPTH_TEXTURE`` are between ``0`` and ``1`` and are nonlinear.
+When displaying depth directly from the ``DEPTH_TEXTURE``, everything will look almost
 white unless it is very close. This is because the depth buffer stores objects closer
 to the camera using more bits than those further, so most of the detail in depth
-buffer is found close to the camera. In order to make the depth value align with world or 
-model coordinates, we need to linearize the value. When we apply the projection matrix to the 
-vertex position, the z value is made nonlinear, so to linearize it, we multiply it by the 
-inverse of the projection matrix, which in Godot, is accessible with the variable 
+buffer is found close to the camera. In order to make the depth value align with world or
+model coordinates, we need to linearize the value. When we apply the projection matrix to the
+vertex position, the z value is made nonlinear, so to linearize it, we multiply it by the
+inverse of the projection matrix, which in Godot, is accessible with the variable
 ``INV_PROJECTION_MATRIX``.
 
-Firstly, take the screen space coordinates and transform them into normalized device 
-coordinates (NDC). NDC run from ``-1`` to ``1``, similar to clip space coordinates. 
-Reconstruct the NDC using ``SCREEN_UV`` for the ``x`` and ``y`` axis, and 
+Firstly, take the screen space coordinates and transform them into normalized device
+coordinates (NDC). NDC run from ``-1`` to ``1``, similar to clip space coordinates.
+Reconstruct the NDC using ``SCREEN_UV`` for the ``x`` and ``y`` axis, and
 the depth value for ``z``.
 
 .. code-block:: glsl
@@ -125,7 +125,7 @@ Because the camera is facing the negative ``z`` direction, the position will hav
 In order to get a usable depth value, we have to negate ``view.z``.
 
 The world position can be constructed from the depth buffer using the following code. Note
-that the ``CAMERA_MATRIX`` is needed to transform the position from view space into world space, so 
+that the ``CAMERA_MATRIX`` is needed to transform the position from view space into world space, so
 it needs to be passed to the fragment shader with a varying.
 
 .. code-block:: glsl
@@ -145,12 +145,12 @@ it needs to be passed to the fragment shader with a varying.
 An optimization
 ---------------
 
-You can benefit from using a single large triangle rather than using a full 
-screen quad. The reason for this is explained `here <https://michaldrobot.com/2014/04/01/gcn-execution-patterns-in-full-screen-passes>`_. 
-However, the benefit is quite small and only beneficial when running especially 
-complex fragment shaders. 
+You can benefit from using a single large triangle rather than using a full
+screen quad. The reason for this is explained `here <https://michaldrobot.com/2014/04/01/gcn-execution-patterns-in-full-screen-passes>`_.
+However, the benefit is quite small and only beneficial when running especially
+complex fragment shaders.
 
-Set the Mesh in the MeshInstance to an :ref:`ArrayMesh <class_ArrayMesh>`. An 
+Set the Mesh in the MeshInstance to an :ref:`ArrayMesh <class_ArrayMesh>`. An
 ArrayMesh is a tool that allows you to easily construct a Mesh from Arrays for
 vertices, normals, colors, etc.
 
@@ -166,25 +166,25 @@ Now, attach a script to the MeshInstance and use the following code:
     verts.append(Vector3(-1.0, -1.0, 0.0))
     verts.append(Vector3(-1.0, 3.0, 0.0))
     verts.append(Vector3(3.0, -1.0, 0.0))
-    
+
     # Create an array of arrays.
     # This could contain normals, colors, UVs, etc.
     var mesh_array = []
     mesh_array.resize(Mesh.ARRAY_MAX) #required size for ArrayMesh Array
     mesh_array[Mesh.ARRAY_VERTEX] = verts #position of vertex array in ArrayMesh Array
-    
+
     # Create mesh from mesh_array:
     mesh.add_surface_from_arrays(Mesh.PRIMITIVE_TRIANGLES, mesh_array)
 
-.. note:: The triangle is specified in normalized device coordinates. Recall, NDC run 
+.. note:: The triangle is specified in normalized device coordinates. Recall, NDC run
           from ``-1`` to ``1`` in both the ``x`` and ``y`` directions. This makes the screen
-          ``2`` units wide and ``2`` units tall. In order to cover the entire screen with 
-          a single triangle, use a triangle that is ``4`` units wide and ``4`` 
+          ``2`` units wide and ``2`` units tall. In order to cover the entire screen with
+          a single triangle, use a triangle that is ``4`` units wide and ``4``
           units tall, double its height and width.
 
 Assign the same vertex shader from above and everything should look exactly the same.
 
-The one drawback to using an ArrayMesh over using a QuadMesh is that the ArrayMesh 
-is not visible in the editor because the triangle is not constructed until the scene 
-is run. To get around that, construct a single triangle Mesh in a modelling program 
+The one drawback to using an ArrayMesh over using a QuadMesh is that the ArrayMesh
+is not visible in the editor because the triangle is not constructed until the scene
+is run. To get around that, construct a single triangle Mesh in a modelling program
 and use that in the MeshInstance instead.

+ 3 - 3
tutorials/shading/godot_shader_language_style_guide.rst

@@ -38,11 +38,11 @@ Here is a complete shader example based on these guidelines:
 
     void fragment() {
         vec3 c = textureLod(SCREEN_TEXTURE, SCREEN_UV, 0.0).rgb;
-        
+
         c.rgb = mix(vec3(0.0), c.rgb, brightness);
         c.rgb = mix(vec3(0.5), c.rgb, contrast);
         c.rgb = mix(vec3(dot(vec3(1.0), c.rgb) * 0.33333), c.rgb, saturation);
-        
+
         COLOR.rgb = c;
     }
 
@@ -331,7 +331,7 @@ We suggest to organize shader code this way:
     03. uniforms
     04. constants
     05. varyings
-    
+
     06. other functions
     07. vertex() function
     08. fragment() function

+ 4 - 4
tutorials/shading/shader_materials.rst

@@ -7,8 +7,8 @@ Introduction
 ------------
 
 For the most common cases, Godot provides ready to use materials for
-most types of shaders, such as :ref:`StandardMaterial3D <class_StandardMaterial3D>`, 
-:ref:`CanvasItemMaterial <class_CanvasItemMaterial>` and :ref:`ParticlesMaterial <class_ParticlesMaterial>`. 
+most types of shaders, such as :ref:`StandardMaterial3D <class_StandardMaterial3D>`,
+:ref:`CanvasItemMaterial <class_CanvasItemMaterial>` and :ref:`ParticlesMaterial <class_ParticlesMaterial>`.
 They are flexible implementations that cover most use cases.
 
 Shader materials allow writing a custom shader directly, for maximum flexibility.
@@ -23,7 +23,7 @@ Examples of this are:
 -  Create custom particle code.
 -  And much more!
 
-Godot provides built in functionality to make frequent operations 
+Godot provides built in functionality to make frequent operations
 easier. Additionally, Godot's shader editor will detect errors as you
 type, so you can see your edited shaders in real-time. It is also
 possible to edit shaders using a visual, node-based graph editor.
@@ -76,6 +76,6 @@ and select the convert option.
 .. image:: img/shader_material_convert.png
 
 .. note::
-    
+
    Using the convert option will turn the StandardMaterial3D into a ShaderMaterial
    with a text shader, not a visual shader.

+ 18 - 18
tutorials/shading/shading_reference/canvas_item_shader.rst

@@ -6,8 +6,8 @@ CanvasItem shaders
 CanvasItem shaders are used to draw all 2D elements in Godot. These include
 all nodes that inherit from CanvasItems, and all GUI elements.
 
-CanvasItem shaders contain less built-in variables and functionality than Spatial 
-shaders, but they maintain the same basic structure with vertex, fragment, and 
+CanvasItem shaders contain less built-in variables and functionality than Spatial
+shaders, but they maintain the same basic structure with vertex, fragment, and
 light processor functions.
 
 Render modes
@@ -38,9 +38,9 @@ Render modes
 Built-ins
 ^^^^^^^^^
 
-Values marked as "in" are read-only. Values marked as "out" are for optional writing and will 
-not necessarily contain sensible values. Values marked as "inout" provide a sensible default 
-value, and can optionally be written to. Samplers are not subjects of writing and they are 
+Values marked as "in" are read-only. Values marked as "out" are for optional writing and will
+not necessarily contain sensible values. Values marked as "inout" provide a sensible default
+value, and can optionally be written to. Samplers are not subjects of writing and they are
 not marked.
 
 Global built-ins
@@ -61,7 +61,7 @@ Vertex built-ins
 Vertex data (``VERTEX``) is presented in local space (pixel coordinates, relative to the camera).
 If not written to, these values will not be modified and be passed through as they came.
 
-The user can disable the built-in modelview transform (projection will still happen later) and do 
+The user can disable the built-in modelview transform (projection will still happen later) and do
 it manually with the following code:
 
 .. code-block:: glsl
@@ -80,22 +80,22 @@ it manually with the following code:
 In order to get the world space coordinates of a vertex, you have to pass in a custom uniform like so:
 
 ::
-  
+
     material.set_shader_param("global_transform", get_global_transform())
 
 
 Then, in your vertex shader:
 
-.. code-block:: glsl 
-  
+.. code-block:: glsl
+
     uniform mat4 global_transform;
     varying vec2 world_position;
-  
+
     void vertex(){
         world_position = (global_transform * vec4(VERTEX, 0.0, 1.0)).xy;
     }
 
-``world_position`` can then be used in either the vertex or fragment functions. 
+``world_position`` can then be used in either the vertex or fragment functions.
 
 Other built-ins, such as UV and COLOR, are also passed through to the fragment function if not modified.
 
@@ -135,9 +135,9 @@ is usually:
 Fragment built-ins
 ^^^^^^^^^^^^^^^^^^
 
-Certain Nodes (for example, :ref:`Sprites <class_Sprite>`) display a texture by default. However, 
-when a custom fragment function is attached to these nodes, the texture lookup needs to be done 
-manually. Godot does not provide the texture color in the ``COLOR`` built-in variable; to read 
+Certain Nodes (for example, :ref:`Sprites <class_Sprite>`) display a texture by default. However,
+when a custom fragment function is attached to these nodes, the texture lookup needs to be done
+manually. Godot does not provide the texture color in the ``COLOR`` built-in variable; to read
 the texture color for such nodes, use:
 
 .. code-block:: glsl
@@ -163,7 +163,7 @@ it to the ``NORMALMAP`` property. Godot will handle converting it for use in 2D
 | inout vec3 **NORMAL**            | Normal read from **NORMAL_TEXTURE**. Writable.                 |
 +----------------------------------+----------------------------------------------------------------+
 | out vec3 **NORMALMAP**           | Configures normal maps meant for 3D for use in 2D. If used,    |
-|                                  | overwrites **NORMAL**.                                         | 
+|                                  | overwrites **NORMAL**.                                         |
 +----------------------------------+----------------------------------------------------------------+
 | inout float **NORMALMAP_DEPTH**  | Normalmap depth for scaling.                                   |
 +----------------------------------+----------------------------------------------------------------+
@@ -194,11 +194,11 @@ it to the ``NORMALMAP`` property. Godot will handle converting it for use in 2D
 Light built-ins
 ^^^^^^^^^^^^^^^
 
-Light processor functions work differently in 2D than they do in 3D. In CanvasItem shaders, the 
-shader is called once for the object being drawn, and then once for each light touching that 
+Light processor functions work differently in 2D than they do in 3D. In CanvasItem shaders, the
+shader is called once for the object being drawn, and then once for each light touching that
 object in the scene. Use render_mode ``unshaded`` if you do not want any light passes to occur
 for that object. Use render_mode ``light_only`` if you only want light passes to occur for
-that object; this can be useful when you only want the object visible where it is covered by light. 
+that object; this can be useful when you only want the object visible where it is covered by light.
 
 When the shader is on a light pass, the ``AT_LIGHT_PASS`` variable will be ``true``.
 

+ 9 - 9
tutorials/shading/shading_reference/particle_shader.rst

@@ -3,14 +3,14 @@
 Particle shaders
 ================
 
-Particle shaders are a special type of vertex shader that runs before the 
-object is drawn. They are used for calculating material properties such as 
-color, position, and rotation. They are drawn with any regular material for 
+Particle shaders are a special type of vertex shader that runs before the
+object is drawn. They are used for calculating material properties such as
+color, position, and rotation. They are drawn with any regular material for
 CanvasItem or Spatial, depending on whether they are 2D or 3D.
 
-Particle shaders are unique because they are not used to draw the object 
-itself; they are used to calculate particle properties, which are then used 
-by the CanvasItem of Spatial shader. They contain only a vertex processor 
+Particle shaders are unique because they are not used to draw the object
+itself; they are used to calculate particle properties, which are then used
+by the CanvasItem of Spatial shader. They contain only a vertex processor
 function that outputs multiple properties (see built-ins below).
 
 Particle shaders use a transform feedback shader, which is a special type of
@@ -39,9 +39,9 @@ Render modes
 Built-ins
 ^^^^^^^^^
 
-Values marked as "in" are read-only. Values marked as "out" are for optional writing and will 
-not necessarily contain sensible values. Values marked as "inout" provide a sensible default 
-value, and can optionally be written to. Samplers are not subjects of writing and they are 
+Values marked as "in" are read-only. Values marked as "out" are for optional writing and will
+not necessarily contain sensible values. Values marked as "inout" provide a sensible default
+value, and can optionally be written to. Samplers are not subjects of writing and they are
 not marked.
 
 Global built-ins

+ 15 - 15
tutorials/shading/shading_reference/shaders.rst

@@ -26,8 +26,8 @@ reference of the shading language in Godot see the :ref:`Godot shading language
 Shader types
 ------------
 
-Instead of supplying a general purpose configuration for all uses (2D, 3D, particles), 
-Godot shaders must specify what they are intended for. Different types support different 
+Instead of supplying a general purpose configuration for all uses (2D, 3D, particles),
+Godot shaders must specify what they are intended for. Different types support different
 render modes, built-in variables, and processing functions.
 
 All shaders need to specify their type in the first line, in the following format:
@@ -61,7 +61,7 @@ Render modes are specified underneath the shader type:
     render_mode unshaded, cull_disabled;
 
 Each shader type has a different list of render modes available. See the document for each shader
-type for a complete list of render modes. 
+type for a complete list of render modes.
 
 Processor functions
 -------------------
@@ -73,41 +73,41 @@ For "particles", only ``vertex`` can be overridden.
 Vertex processor
 ^^^^^^^^^^^^^^^^
 
-The ``vertex`` processing function is called once for every vertex in "spatial" and "canvas_item" shaders. 
+The ``vertex`` processing function is called once for every vertex in "spatial" and "canvas_item" shaders.
 For "particles" shaders, it is called once for every particle.
 
-The ``vertex`` function is used to modify per-vertex information that will be passed on to the fragment 
-function. It can also be used to establish variables that will be sent to the fragment function by using 
+The ``vertex`` function is used to modify per-vertex information that will be passed on to the fragment
+function. It can also be used to establish variables that will be sent to the fragment function by using
 varyings(see other doc).
 
 By default, Godot will take your vertex information and transform it accordingly to be drawn. If this is
-undesirable, you can use render modes to transform the data yourself; see the 
+undesirable, you can use render modes to transform the data yourself; see the
 :ref:`Spatial shader doc <doc_spatial_shader>` for an example of this.
 
 Fragment processor
 ^^^^^^^^^^^^^^^^^^
 
 The ``fragment`` processing function is used to set up the Godot material parameters per pixel. This code
-runs on every visible pixel the object or primitive draws. It is only available in "spatial" and 
+runs on every visible pixel the object or primitive draws. It is only available in "spatial" and
 "canvas_item" shaders.
 
-The standard use of the fragment function is to set up material properties that will be used to calculate 
+The standard use of the fragment function is to set up material properties that will be used to calculate
 lighting. For example, you would set values for ``ROUGHNESS``, ``RIM``, or ``TRANSMISSION`` which would
 tell the light function how the lights respond to that fragment. This makes it possible to control a complex
 shading pipeline without the user having to write much code. If you don't need this built-in functionality,
-you can ignore it and write your own light processing function and Godot will optimize it away. For example, 
+you can ignore it and write your own light processing function and Godot will optimize it away. For example,
 if you do not write a value to ``RIM``, Godot will not calculate rim lighting. During compilation, Godot checks
-to see if ``RIM`` is used; if not, it cuts all the corresponding code out. Therefore, you will not 
-waste calculations on effects that you do not use. 
+to see if ``RIM`` is used; if not, it cuts all the corresponding code out. Therefore, you will not
+waste calculations on effects that you do not use.
 
 Light processor
 ^^^^^^^^^^^^^^^
 
-The ``light`` processor runs per pixel too, but also runs for every light that affects the object 
-(and does not run if no lights affect the object). It exists as a function called inside the 
+The ``light`` processor runs per pixel too, but also runs for every light that affects the object
+(and does not run if no lights affect the object). It exists as a function called inside the
 ``fragment`` processor and typically operates on the material properties setup inside the ``fragment``
 function.
 
 The ``light`` processor works differently in 2D than it does in 3D; for a description of how it works
-in each, see their documentation, :ref:`CanvasItem shaders <doc_canvas_item_shader>` and 
+in each, see their documentation, :ref:`CanvasItem shaders <doc_canvas_item_shader>` and
 :ref:`Spatial shaders <doc_spatial_shader>`, respectively.

+ 25 - 25
tutorials/shading/shading_reference/shading_language.rst

@@ -106,13 +106,13 @@ Default integer constants are signed, so casting is always needed to convert to
 Members
 ~~~~~~~
 
-Individual scalar members of vector types are accessed via the "x", "y", "z" and "w" members. 
-Alternatively, using "r", "g", "b" and "a" also works and is equivalent. Use whatever fits 
+Individual scalar members of vector types are accessed via the "x", "y", "z" and "w" members.
+Alternatively, using "r", "g", "b" and "a" also works and is equivalent. Use whatever fits
 best for your needs.
 
-For matrices, use the ``m[row][column]`` indexing syntax to access each scalar, or ``m[idx]`` to access 
-a vector by row index. For example, for accessing the y position of an object in a mat4 you  use 
-``m[3][1]``.  
+For matrices, use the ``m[row][column]`` indexing syntax to access each scalar, or ``m[idx]`` to access
+a vector by row index. For example, for accessing the y position of an object in a mat4 you  use
+``m[3][1]``.
 
 Constructing
 ~~~~~~~~~~~~
@@ -129,8 +129,8 @@ Construction of vector types must always pass:
     // A single scalar for the whole vector
     vec4 a = vec4(0.0);
 
-Construction of matrix types requires vectors of the same dimension as the matrix. You can 
-also build a diagonal matrix using ``matx(float)`` syntax. Accordingly, ``mat4(1.0)`` is 
+Construction of matrix types requires vectors of the same dimension as the matrix. You can
+also build a diagonal matrix using ``matx(float)`` syntax. Accordingly, ``mat4(1.0)`` is
 an identity matrix.
 
 .. code-block:: glsl
@@ -141,12 +141,12 @@ an identity matrix.
 
 Matrices can also be built from a matrix of another dimension.
 There are two rules :
-If a larger matrix is constructed from a smaller matrix, the additional rows and columns are 
-set to the values they would have in an identity matrix. If a smaller matrix is constructed 
+If a larger matrix is constructed from a smaller matrix, the additional rows and columns are
+set to the values they would have in an identity matrix. If a smaller matrix is constructed
 from a larger matrix, the top, left submatrix of the larger matrix is used.
 
 .. code-block:: glsl
-	
+
 	mat3 basis = mat3(WORLD_MATRIX);
 	mat4 m4 = mat4(basis);
 	mat2 m2 = mat2(m4);
@@ -154,7 +154,7 @@ from a larger matrix, the top, left submatrix of the larger matrix is used.
 Swizzling
 ~~~~~~~~~
 
-It is possible to obtain any combination of components in any order, as long as the result 
+It is possible to obtain any combination of components in any order, as long as the result
 is another vector type (or scalar). This is easier shown than explained:
 
 .. code-block:: glsl
@@ -183,12 +183,12 @@ It is possible to add precision modifiers to datatypes; use them for uniforms, v
 
 
 Using lower precision for some operations can speed up the math involved (at the cost of less precision).
-This is rarely needed in the vertex processor function (where full precision is needed most of the time), 
+This is rarely needed in the vertex processor function (where full precision is needed most of the time),
 but is often useful in the fragment processor.
 
-Keep in mind that some architectures (mainly mobile) benefit a lot from this, but are also restricted 
-(conversion between precisions has a cost). Please read the relevant documentation on the target architecture 
-to find out more. In all honesty though, mobile drivers are buggy, so, to stay out of trouble, make simple 
+Keep in mind that some architectures (mainly mobile) benefit a lot from this, but are also restricted
+(conversion between precisions has a cost). Please read the relevant documentation on the target architecture
+to find out more. In all honesty though, mobile drivers are buggy, so, to stay out of trouble, make simple
 shaders without specifying precision unless you *really* need to.
 
 Arrays
@@ -236,9 +236,9 @@ To access an array element, use the indexing syntax:
 .. code-block:: glsl
 
       float arr[3];
-      
+
       arr[0] = 1.0; // setter
-      
+
       COLOR.r = arr[0]; // getter
 
 Arrays also have a built-in function ``.length()`` (not to be confused with the built-in ``length()`` function). It doesn't accept any parameters and will return the array's size.
@@ -285,7 +285,7 @@ Constants can be declared both globally (outside of any function) or locally (in
 Global constants are useful when you want to have access to a value throughout your shader that does not need to be modified. Like uniforms, global constants are shared between all shader stages, but they are not accessible outside of the shader.
 
 .. code-block:: glsl
-    
+
     shader_type spatial;
 
     const float PI = 3.14159265358979323846;
@@ -354,7 +354,7 @@ You can also pass them to functions:
     }
 
     void fragment()
-    { 
+    {
         COLOR.rgb = construct_scene(PointLight(vec3(0.0, 0.0, 0.0), vec3(1.0, 0.0, 0.0), 1.0), PointLight(vec3(0.0, 0.0, 0.0), vec3(1.0, 0.0, 1.0), 1.0)).lights[0].color;
     }
 
@@ -460,7 +460,7 @@ It is possible to define functions in a Godot shader. They use the following syn
     }
 
 
-You can only use functions that have been defined above (higher in the editor) the function from which you are calling 
+You can only use functions that have been defined above (higher in the editor) the function from which you are calling
 them.
 
 Function arguments can have special qualifiers:
@@ -480,8 +480,8 @@ Example below:
 Varyings
 ~~~~~~~~
 
-To send data from the vertex to the fragment processor function, *varyings* are used. They are set 
-for every primitive vertex in the *vertex processor*, and the value is interpolated for every 
+To send data from the vertex to the fragment processor function, *varyings* are used. They are set
+for every primitive vertex in the *vertex processor*, and the value is interpolated for every
 pixel in the fragment processor.
 
 .. code-block:: glsl
@@ -559,7 +559,7 @@ Uniforms can't be written from within the shader.
 
 You can set uniforms in the editor in the material. Or you can set them through GDScript:
 
-:: 
+::
 
   material.set_shader_param("some_value", some_value)
 
@@ -642,8 +642,8 @@ Built-in functions
 A large number of built-in functions are supported, conforming to GLSL ES 3.0.
 When vec_type (float), vec_int_type, vec_uint_type, vec_bool_type nomenclature is used, it can be scalar or vector.
 
-.. note:: For a list of the functions that are not available in the GLES2 backend, please see the 
-          :ref:`Differences between GLES2 and GLES3 doc <doc_gles2_gles3_differences>`. 
+.. note:: For a list of the functions that are not available in the GLES2 backend, please see the
+          :ref:`Differences between GLES2 and GLES3 doc <doc_gles2_gles3_differences>`.
 
 +------------------------------------------------------------------------+---------------------------------------------------------------+
 | Function                                                               | Description                                                   |

+ 16 - 16
tutorials/shading/shading_reference/sky_shader.rst

@@ -5,21 +5,21 @@ Sky shaders
 
 Sky shaders are a special type of shader used for drawing sky backgrounds
 and for updating radiance cubemaps which are used for image-based lighting
-(IBL). Sky shaders only have one processing function, the ``fragment()`` 
+(IBL). Sky shaders only have one processing function, the ``fragment()``
 function.
 
 There are three places the sky shader is used.
 
-* First the sky shader is used to draw the sky when you have selected to use 
-  a Sky as the background in your scene. 
+* First the sky shader is used to draw the sky when you have selected to use
+  a Sky as the background in your scene.
 * Second, the sky shader is used to update the radiance cubemap
-  when using the Sky for ambient color or reflections. 
+  when using the Sky for ambient color or reflections.
 * Third, the sky shader is used to draw the lower res subpasses which can be
-  used in the high-res background or cubemap pass. 
+  used in the high-res background or cubemap pass.
 
 In total, this means the sky shader can run up
 to six times per frame, however, in practice it will be much less than that
-because the radiance cubemap does not need to be updated every frame, and 
+because the radiance cubemap does not need to be updated every frame, and
 not all subpasses will be used. You can change the behavior of the shader
 based on where it is called by checking the ``AT_*_PASS`` booleans. For
 example:
@@ -30,7 +30,7 @@ example:
 
     void fragment() {
         if (AT_CUBEMAP_PASS) {
-            // Sets the radiance cubemap to a nice shade of blue instead of doing 
+            // Sets the radiance cubemap to a nice shade of blue instead of doing
             // expensive sky calculations
             COLOR = vec3(0.2, 0.6, 1.0);
         } else {
@@ -44,15 +44,15 @@ When using the sky shader to draw a background, the shader will be called for
 all non-occluded fragments on the screen. However, for the background's
 subpasses, the shader will be called for every pixel of the subpass.
 
-When using the sky shader to update the radiance cubemap, the sky shader 
+When using the sky shader to update the radiance cubemap, the sky shader
 will be called for every pixel in the cubemap. On the other hand, the shader
-will only be called when the radiance cubemap needs to be updated. The radiance 
-cubemap needs to be updated when any of the shader parameters are updated. 
+will only be called when the radiance cubemap needs to be updated. The radiance
+cubemap needs to be updated when any of the shader parameters are updated.
 For example, if ``TIME`` is used in the shader, then the radiance cubemap
 will update every frame. The following list of changes force an update of
 the radiance cubemap:
 
-* ``TIME`` is used. 
+* ``TIME`` is used.
 * ``POSITION`` is used and the camera position changes.
 * If any ``LIGHTX_*`` properties are used and any
   :ref:`DirectionalLight3D <class_DirectionalLight>` changes.
@@ -60,7 +60,7 @@ the radiance cubemap:
 * If the screen is resized and either of the subpasses are used.
 
 Try to avoid updating the radiance cubemap needlessly. If you do need to
-update the radiance cubemap each frame, make sure your 
+update the radiance cubemap each frame, make sure your
 :ref:`Sky process mode <class_Sky_property_process_mode>` is set to
 :ref:`REALTIME <class_Sky_constant_PROCESS_MODE_REALTIME>`.
 
@@ -82,7 +82,7 @@ a lower resolution than the rest of the sky:
             vec4 color = generate_clouds(EYEDIR);
             COLOR = color.rgb;
             ALPHA = color.a;
-        } else { 
+        } else {
             // At full resolution pass, blend sky and clouds together
             vec3 color = generate_sky(EYEDIR);
             COLOR = color + HALF_RES_COLOR.rgb * HALF_RES_COLOR.a;
@@ -100,9 +100,9 @@ a lower resolution than the rest of the sky:
 Built-ins
 ^^^^^^^^^
 
-Values marked as "in" are read-only. Values marked as "out" are for optional writing and will 
-not necessarily contain sensible values. Values marked as "inout" provide a sensible default 
-value, and can optionally be written to. Samplers are not subjects of writing and they are 
+Values marked as "in" are read-only. Values marked as "out" are for optional writing and will
+not necessarily contain sensible values. Values marked as "inout" provide a sensible default
+value, and can optionally be written to. Samplers are not subjects of writing and they are
 not marked.
 
 Global built-ins

+ 12 - 12
tutorials/shading/shading_reference/spatial_shader.rst

@@ -3,7 +3,7 @@
 Spatial shaders
 ===============
 
-Spatial shaders are used for shading 3D objects. They are the most complex type of shader Godot offers. 
+Spatial shaders are used for shading 3D objects. They are the most complex type of shader Godot offers.
 Spatial shaders are highly configurable with different render modes and different rendering options
 (e.g. Subsurface Scattering, Transmission, Ambient Occlusion, Rim lighting etc). Users can optionally
 write vertex, fragment, and light processor functions to affect how objects are drawn.
@@ -72,7 +72,7 @@ Render modes
 +---------------------------------+-----------------------------------------------------------------------+
 | **ambient_light_disabled**      | Disable contribution from ambient light and radiance map.             |
 +---------------------------------+-----------------------------------------------------------------------+
-| **shadow_to_opacity**           | Lighting modifies the alpha so shadowed areas are opaque and          | 
+| **shadow_to_opacity**           | Lighting modifies the alpha so shadowed areas are opaque and          |
 |                                 | non-shadowed areas are transparent. Useful for overlaying shadows onto|
 |                                 | a camera feed in AR.                                                  |
 +---------------------------------+-----------------------------------------------------------------------+
@@ -80,9 +80,9 @@ Render modes
 Built-ins
 ^^^^^^^^^
 
-Values marked as "in" are read-only. Values marked as "out" are for optional writing and will 
-not necessarily contain sensible values. Values marked as "inout" provide a sensible default 
-value, and can optionally be written to. Samplers are not subjects of writing and they are 
+Values marked as "in" are read-only. Values marked as "out" are for optional writing and will
+not necessarily contain sensible values. Values marked as "inout" provide a sensible default
+value, and can optionally be written to. Samplers are not subjects of writing and they are
 not marked.
 
 Global built-ins
@@ -99,13 +99,13 @@ Global built-ins are available everywhere, including custom functions.
 Vertex built-ins
 ^^^^^^^^^^^^^^^^
 
-Vertex data (``VERTEX``, ``NORMAL``, ``TANGENT``, ``BITANGENT``) are presented in local 
-model space. If not written to, these values will not be modified and be passed through 
+Vertex data (``VERTEX``, ``NORMAL``, ``TANGENT``, ``BITANGENT``) are presented in local
+model space. If not written to, these values will not be modified and be passed through
 as they came.
 
 They can optionally be presented in world space by using the *world_vertex_coords* render mode.
 
-Users can disable the built-in modelview transform (projection will still happen later) and do 
+Users can disable the built-in modelview transform (projection will still happen later) and do
 it manually with the following code:
 
 .. code-block:: glsl
@@ -122,7 +122,7 @@ it manually with the following code:
 Other built-ins, such as UV, UV2 and COLOR, are also passed through to the fragment function if not modified.
 
 Users can override the modelview and projection transforms using the ``POSITION`` built-in. When ``POSITION`` is used,
-the value from ``VERTEX`` is ignored and projection does not happen. However, the value passed to the fragment shader 
+the value from ``VERTEX`` is ignored and projection does not happen. However, the value passed to the fragment shader
 still comes from ``VERTEX``.
 
 For instancing, the INSTANCE_CUSTOM variable contains the instance custom data. When using particles, this information
@@ -184,8 +184,8 @@ Fragment built-ins
 ^^^^^^^^^^^^^^^^^^
 
 The default use of a Godot fragment processor function is to set up the material properties of your object
-and to let the built-in renderer handle the final shading. However, you are not required to use all 
-these properties, and if you don't write to them, Godot will optimize away the corresponding functionality. 
+and to let the built-in renderer handle the final shading. However, you are not required to use all
+these properties, and if you don't write to them, Godot will optimize away the corresponding functionality.
 
 +-----------------------------------+--------------------------------------------------------------------------------------------------+
 | Built-in                          | Description                                                                                      |
@@ -282,7 +282,7 @@ render_mode to ``unshaded``. If no light function is written, Godot will use the
 properties written to in the fragment function to calculate the lighting for you (subject to
 the render_mode).
 
-To write a light function, assign something to ``DIFFUSE_LIGHT`` or ``SPECULAR_LIGHT``. Assigning nothing 
+To write a light function, assign something to ``DIFFUSE_LIGHT`` or ``SPECULAR_LIGHT``. Assigning nothing
 means no light is processed.
 
 The light function is called for every light in every pixel. It is called within a loop for

+ 1 - 1
tutorials/shading/your_first_shader/index.rst

@@ -11,7 +11,7 @@ comprehensive. For a comprehensive and detailed overview of shaders in Godot see
 into the rendering pipeline.
 
 The "your first shader" tutorials walk you through the process of writing a shader
-step-by-step. 
+step-by-step.
 
 For a more general introduction into shaders and the OpenGL Shading Language, use
 `The Book of Shaders <https://thebookofshaders.com>`_.

+ 20 - 20
tutorials/shading/your_first_shader/what_are_shaders.rst

@@ -6,12 +6,12 @@ What are shaders?
 Introduction
 ------------
 
-So, you have decided to give shaders a try. You have likely heard that they can be used to 
+So, you have decided to give shaders a try. You have likely heard that they can be used to
 create interesting effects that run incredibly fast. You have also likely heard that they
-are terrifying. Both are true. 
+are terrifying. Both are true.
 
 Shaders can be used to create a wide range of effects (in fact everything drawn in a modern
-rendering engine is done with shaders). 
+rendering engine is done with shaders).
 
 Writing shaders can also be very difficult for people unfamiliar with them. Godot tries to make writing
 shaders a little easier by exposing many useful built-in features and handling some of the
@@ -22,19 +22,19 @@ But what are they?
 ------------------
 
 Shaders are a special kind of program that runs on Graphics Processing Units (GPUs). Most computers
-have some sort of GPU, either one integrated into their CPU or discrete (meaning it is a separate 
-hardware component, for example, the typical graphics card). GPUs are especially useful for 
+have some sort of GPU, either one integrated into their CPU or discrete (meaning it is a separate
+hardware component, for example, the typical graphics card). GPUs are especially useful for
 rendering because they are optimized for running thousands of instructions in parallel.
 
 The output of the shader is typically the colored pixels of the object drawn to the viewport. But some
 shaders allow for specialized outputs (this is especially true for APIs like Vulkan). Shaders operate
 inside the shader pipeline. The standard process is the vertex -> fragment shader pipeline. The vertex
-shader is used to decided where each vertex (point in a 3D model, or corner of a Sprite) goes and the 
-fragment shader decides what color individual pixels receive. 
+shader is used to decided where each vertex (point in a 3D model, or corner of a Sprite) goes and the
+fragment shader decides what color individual pixels receive.
 
 Suppose you want to update all the pixels in a texture to a given color, on the CPU you would write:
 
-:: 
+::
 
   for x in range(width):
     for y in range(height):
@@ -52,11 +52,11 @@ In a shader you are given access only to the inside of the loop so what you writ
 You have no control over how this function is called. So you have to design your shaders
 differently from how you would design programs on the CPU.
 
-A consequence of the shader pipeline is that you cannot access the results from a previous 
-run of the shader, you cannot access other pixels from the pixel being drawn, and you cannot 
-write outside of the current pixel being drawn. This enables the GPU to execute the shader 
-for different pixels in parallel, as they do not depend on each other. This lack of 
-flexibility is designed to work with the GPU which allows shaders to be incredibly fast. 
+A consequence of the shader pipeline is that you cannot access the results from a previous
+run of the shader, you cannot access other pixels from the pixel being drawn, and you cannot
+write outside of the current pixel being drawn. This enables the GPU to execute the shader
+for different pixels in parallel, as they do not depend on each other. This lack of
+flexibility is designed to work with the GPU which allows shaders to be incredibly fast.
 
 What can they do
 ^^^^^^^^^^^^^^^^
@@ -73,22 +73,22 @@ What can't they do
 - access other pixels from current pixel (or vertices)
 - store previous iterations
 - update on the fly (they can, but they need to be compiled)
- 
+
 Structure of a shader
 ---------------------
 
 In Godot, shaders are made up of 3 main functions: the ``vertex()`` function, the ``fragment()``
-function and the ``light()`` function. 
+function and the ``light()`` function.
 
 The ``vertex()`` function runs over all the vertices in the mesh and sets their positions as well
 as some other per-vertex variables.
 
 The ``fragment()`` function runs for every pixel that is covered by the mesh. It uses the variables
-from the ``vertex()`` function to run. The variables from the ``vertex()`` function are interpolated 
+from the ``vertex()`` function to run. The variables from the ``vertex()`` function are interpolated
 between the vertices to provide the values for the ``fragment()`` function.
 
-The ``light()`` function runs for every pixel and for every light. It takes variables from the 
-``fragment()`` function and from previous runs of itself. 
+The ``light()`` function runs for every pixel and for every light. It takes variables from the
+``fragment()`` function and from previous runs of itself.
 
 For more information about how shaders operate specifically in Godot see the :ref:`Shaders <doc_shaders>` doc.
 
@@ -103,9 +103,9 @@ many or all cores are doing the same calculation at once, but with different dat
 
 That is where shaders come in. The GPU will call the shader a bunch of times simultaneously, and then
 operate on different bits of data (vertices, or pixels). These bunches of data are often called wavefronts.
-A shader will run the same for every thread in the wavefront. For example, if a given GPU can handle 100 
+A shader will run the same for every thread in the wavefront. For example, if a given GPU can handle 100
 threads per wavefront, a wavefront will run on a 10×10 block of pixels together. It will continue to
-run for all pixels in that wavefront until they are complete. Accordingly, if you have one pixel slower 
+run for all pixels in that wavefront until they are complete. Accordingly, if you have one pixel slower
 than the rest (due to excessive branching), the entire block will be slowed down, resulting in massively
 slower render times.
 

+ 18 - 18
tutorials/shading/your_first_shader/your_first_canvasitem_shader.rst

@@ -6,7 +6,7 @@ Your first CanvasItem shader
 Introduction
 ------------
 
-Shaders are special programs that execute on the GPU and are used for rendering 
+Shaders are special programs that execute on the GPU and are used for rendering
 graphics. All modern rendering is done with shaders. For a more detailed description
 of what shaders are please see :ref:`What are shaders <doc_what_are_shaders>`.
 
@@ -14,16 +14,16 @@ This tutorial will focus on the practical aspects of writing shader programs by
 you through the process of writing a shader with both vertex and fragment functions.
 This tutorial targets absolute beginners to shaders.
 
-.. note:: If you have experience writing shaders and are just looking for 
+.. note:: If you have experience writing shaders and are just looking for
           an overview of how shaders work in Godot, see the :ref:`Shading Reference <toc-shading-reference>`.
 
 Setup
 -----
 
-:ref:`CanvasItem <doc_canvas_item_shader>` shaders are used to draw all 2D objects in Godot, 
+:ref:`CanvasItem <doc_canvas_item_shader>` shaders are used to draw all 2D objects in Godot,
 while :ref:`Spatial <doc_spatial_shader>` shaders are used to draw all 3D objects.
 
-In order to use a shader it must be attached inside a :ref:`Material <class_material>` 
+In order to use a shader it must be attached inside a :ref:`Material <class_material>`
 which must be attached to an object. Materials are a type of :ref:`Resource <doc_resources>`.
 To draw multiple objects with the same material, the material must be attached to each object.
 
@@ -54,7 +54,7 @@ In Godot, all shaders start with a line specifying what type of shader they are.
 the following format:
 
 .. code-block:: glsl
-  
+
   shader_type canvas_item;
 
 Because we are writing a CanvasItem shader, we specify ``canvas_item`` in the first line. All our code will
@@ -63,7 +63,7 @@ go beneath this declaration.
 This line tells the engine which built-in variables and functionality to supply you with.
 
 In Godot you can override three functions to control how the shader operates; ``vertex``, ``fragment``, and ``light``.
-This tutorial will walk you through writing a shader with both vertex and fragment functions. Light 
+This tutorial will walk you through writing a shader with both vertex and fragment functions. Light
 functions are significantly more complex than vertex and fragment functions and so will not be covered here.
 
 Your first fragment function
@@ -71,14 +71,14 @@ Your first fragment function
 
 The fragment function runs for every pixel in a Sprite and determines what color that pixel should be.
 
-They are restricted to the pixels covered by the Sprite, that means you cannot use one to, for example, 
-create an outline around a Sprite. 
+They are restricted to the pixels covered by the Sprite, that means you cannot use one to, for example,
+create an outline around a Sprite.
 
-The most basic fragment function does nothing except assign a single color to every pixel. 
+The most basic fragment function does nothing except assign a single color to every pixel.
 
 We do so by writing a ``vec4`` to the built-in variable ``COLOR``. ``vec4`` is shorthand for constructing
-a vector with 4 numbers. For more information about vectors see the :ref:`Vector math tutorial <doc_vector_math>` 
-``COLOR`` is both an input variable to the fragment function and the final output from it. 
+a vector with 4 numbers. For more information about vectors see the :ref:`Vector math tutorial <doc_vector_math>`
+``COLOR`` is both an input variable to the fragment function and the final output from it.
 
 .. code-block:: glsl
 
@@ -124,7 +124,7 @@ manually like in the code below.
     COLOR.b = 1.0;
   }
 
-The default fragment function reads from a texture and displays it. When you overwrite the default fragment function, 
+The default fragment function reads from a texture and displays it. When you overwrite the default fragment function,
 you lose that functionality, so you have to implement it yourself. You read from textures using the
 ``texture`` function. Certain nodes, like Sprites, have a dedicated texture variable that can be accessed in the shader
 using ``TEXTURE``. Use it together with ``UV`` and ``texture`` to draw the Sprite.
@@ -163,7 +163,7 @@ Add a uniform to change the amount of blue in our Sprite.
   }
 
 Now you can change the amount of blue in the Sprite from the editor. Look back at the Inspector
-under where you created your shader. You should see a section called "Shader Param". Unfold that 
+under where you created your shader. You should see a section called "Shader Param". Unfold that
 section and you will see the uniform you just declared. If you change the value in the editor, it
 will overwrite the default value you provided in the shader.
 
@@ -178,7 +178,7 @@ material resource. With a Sprite node, the following code can be used to set the
   var blue_value = 1.0
   material.set_shader_param("blue", blue_value)
 
-Note that the name of the uniform is a string. The string must match exactly with how it is 
+Note that the name of the uniform is a string. The string must match exactly with how it is
 written in the shader, including spelling and case.
 
 Your first vertex function
@@ -190,7 +190,7 @@ Use the vertex function to calculate where on the screen each vertex should end
 
 The most important variable in the vertex function is ``VERTEX``. Initially, it specifies
 the vertex coordinates in your model, but you also write to it to determine where to actually
-draw those vertices. ``VERTEX`` is a ``vec2`` that is initially presented in local-space 
+draw those vertices. ``VERTEX`` is a ``vec2`` that is initially presented in local-space
 (i.e. not relative to the camera, viewport, or parent nodes).
 
 You can offset the vertices by directly adding to ``VERTEX``.
@@ -199,7 +199,7 @@ You can offset the vertices by directly adding to ``VERTEX``.
 
   void vertex() {
     VERTEX += vec2(10.0, 0.0);
-  } 
+  }
 
 Combined with the ``TIME`` built-in variable, this can be used for simple animation.
 
@@ -214,7 +214,7 @@ Conclusion
 ----------
 
 At their core, shaders do what you have seen so far, they compute ``VERTEX`` and ``COLOR``. It is
-up to you to dream up more complex mathematical strategies for assigning values to those variables. 
+up to you to dream up more complex mathematical strategies for assigning values to those variables.
 
 For inspiration, take a look at some of the more advanced shader tutorials, and look at other sites
-like `Shadertoy <https://www.shadertoy.com/results?query=&sort=popular&from=10&num=4>`_ and `The Book of Shaders <https://thebookofshaders.com>`_. 
+like `Shadertoy <https://www.shadertoy.com/results?query=&sort=popular&from=10&num=4>`_ and `The Book of Shaders <https://thebookofshaders.com>`_.

+ 34 - 34
tutorials/shading/your_first_shader/your_first_spatial_shader.rst

@@ -3,9 +3,9 @@
 Your first Spatial shader
 ============================
 
-You have decided to start writing your own custom Spatial shader. Maybe you saw a cool trick 
-online that was done with shaders, or you have found that the 
-:ref:`StandardMaterial3D <class_StandardMaterial3D>` isn't quite meeting your needs. Either way, 
+You have decided to start writing your own custom Spatial shader. Maybe you saw a cool trick
+online that was done with shaders, or you have found that the
+:ref:`StandardMaterial3D <class_StandardMaterial3D>` isn't quite meeting your needs. Either way,
 you have decided to write your own and now you need figure out where to start.
 
 This tutorial will explain how to write a Spatial shader and will cover more topics than the
@@ -13,19 +13,19 @@ This tutorial will explain how to write a Spatial shader and will cover more top
 
 Spatial shaders have more built-in functionality than CanvasItem shaders. The expectation with
 spatial shaders is that Godot has already provided the functionality for common use cases and all
-the user needs to do in the shader is set the proper parameters. This is especially true for a 
+the user needs to do in the shader is set the proper parameters. This is especially true for a
 PBR (physically based rendering) workflow.
 
 This is a two-part tutorial. In this first part we are going to go through how to make a simple terrain
-using vertex displacement from a heightmap in the vertex function. In the :ref:`second part <doc_your_second_spatial_shader>` 
-we are going to take the concepts from this tutorial and walk through how to set up custom materials 
+using vertex displacement from a heightmap in the vertex function. In the :ref:`second part <doc_your_second_spatial_shader>`
+we are going to take the concepts from this tutorial and walk through how to set up custom materials
 in a fragment shader by writing an ocean water shader.
 
 .. note:: This tutorial assumes some basic shader knowledge such as types (``vec2``, ``float``,
-          ``sampler2D``), and functions. If you are uncomfortable with these concepts it is 
-          best to get a gentle introduction from `The Book of Shaders 
+          ``sampler2D``), and functions. If you are uncomfortable with these concepts it is
+          best to get a gentle introduction from `The Book of Shaders
           <https://thebookofshaders.com>`_ before completing this tutorial.
-          
+
 Where to assign my material
 ---------------------------
 
@@ -33,26 +33,26 @@ In 3D, objects are drawn using :ref:`Meshes <class_Mesh>`. Meshes are a resource
 (the shape of your object) and materials (the color and how the object reacts to light) in units called
 "surfaces". A Mesh can have multiple surfaces, or just one. Typically, you would
 import a mesh from another program (e.g. Blender). But Godot also has a few :ref:`PrimitiveMeshes <class_primitivemesh>`
-that allow you to add basic geometry to a scene without importing Meshes. 
+that allow you to add basic geometry to a scene without importing Meshes.
 
 There are multiple node types that you can use to draw a mesh. The main one is :ref:`MeshInstance <class_meshinstance>`,
 but you can also use :ref:`Particles <class_particles>`, :ref:`MultiMeshes <class_MultiMesh>` (with a
 :ref:`MultiMeshInstance <class_multimeshinstance>`), or others.
 
 Typically, a material is associated with a given surface in a mesh, but some nodes, like MeshInstance, allow
-you to override the material for a specific surface, or for all surfaces. 
+you to override the material for a specific surface, or for all surfaces.
 
-If you set a material on the surface or mesh itself, then all MeshInstances that share that mesh will share that material. 
-However, if you want to reuse the same mesh across multiple mesh instances, but have different materials for each 
+If you set a material on the surface or mesh itself, then all MeshInstances that share that mesh will share that material.
+However, if you want to reuse the same mesh across multiple mesh instances, but have different materials for each
 instance then you should set the material on the Meshinstance.
 
-For this tutorial we will set our material on the mesh itself rather than taking advantage of the MeshInstance's 
-ability to override materials. 
+For this tutorial we will set our material on the mesh itself rather than taking advantage of the MeshInstance's
+ability to override materials.
 
 Setting up
 ----------
 
-Add a new :ref:`MeshInstance <class_meshinstance>` node to your scene. 
+Add a new :ref:`MeshInstance <class_meshinstance>` node to your scene.
 
 In the inspector tab beside "Mesh" click "[empty]" and select "New PlaneMesh".
 Then click on the image of a plane that appears.
@@ -76,11 +76,11 @@ us more vertices to work with and thus allow us to add more detail.
 
 .. image:: img/plane-sub.png
 
-:ref:`PrimitiveMeshes <class_primitivemesh>`, like PlaneMesh, only have one surface, so instead of 
-an array of materials there is only one. Click beside "Material" where it says "[empty]" and 
-select "New ShaderMaterial". Then click the sphere that appears. 
+:ref:`PrimitiveMeshes <class_primitivemesh>`, like PlaneMesh, only have one surface, so instead of
+an array of materials there is only one. Click beside "Material" where it says "[empty]" and
+select "New ShaderMaterial". Then click the sphere that appears.
 
-Now click beside "Shader" where it says "[empty]" and select "New Shader". 
+Now click beside "Shader" where it says "[empty]" and select "New Shader".
 
 The shader editor should now pop up and you are ready to begin writing your first Spatial shader!
 
@@ -97,8 +97,8 @@ We set the variable ``shader_type`` to ``spatial`` because this is a spatial sha
 
   shader_type spatial;
 
-Next we will define the ``vertex()`` function. The ``vertex()`` function determines where 
-the vertices of your :ref:`Mesh<class_MeshInstance>` appear in the final scene. We will be 
+Next we will define the ``vertex()`` function. The ``vertex()`` function determines where
+the vertices of your :ref:`Mesh<class_MeshInstance>` appear in the final scene. We will be
 using it to offset the height of each vertex and make our flat plane appear like a little terrain.
 
 We define the vertex shader like so:
@@ -213,7 +213,7 @@ precedence over the value used to initialize it in the shader.
   # called from the MeshInstance
   mesh.material.set_shader_param("height_scale", 0.5)
 
-.. note:: Changing uniforms in Spatial-based nodes is different from CanvasItem-based nodes. Here, 
+.. note:: Changing uniforms in Spatial-based nodes is different from CanvasItem-based nodes. Here,
           we set the material inside the PlaneMesh resource. In other mesh resources you may
           need to first access the material by calling ``surface_get_material()``. While in
           the MeshInstance you would access the material using ``get_surface_material()`` or
@@ -250,15 +250,15 @@ First, we will add an :ref:`OmniLight<class_OmniLight>` to the scene.
 .. image:: img/light.png
 
 You can see the light affecting the terrain, but it looks odd. The problem is the light
-is affecting the terrain as if it were a flat plane. This is because the light shader uses 
-the normals from the :ref:`Mesh <class_mesh>` to calculate light. 
+is affecting the terrain as if it were a flat plane. This is because the light shader uses
+the normals from the :ref:`Mesh <class_mesh>` to calculate light.
 
-The normals are stored in the Mesh, but we are changing the shape of the Mesh in the 
-shader, so the normals are no longer correct. To fix this, we can recalculate the normals 
-in the shader or use a normal texture that corresponds to our noise. Godot makes both easy for us. 
+The normals are stored in the Mesh, but we are changing the shape of the Mesh in the
+shader, so the normals are no longer correct. To fix this, we can recalculate the normals
+in the shader or use a normal texture that corresponds to our noise. Godot makes both easy for us.
 
 You can calculate the new normal manually in the vertex function and then just set ``NORMAL``.
-With ``NORMAL`` set, Godot will do all the difficult lighting calculations for us. We will cover 
+With ``NORMAL`` set, Godot will do all the difficult lighting calculations for us. We will cover
 this method in the next part of this tutorial, for now we will read normals from a texture.
 
 Instead we will rely on the NoiseTexture again to calculate normals for us. We do that by passing in
@@ -283,11 +283,11 @@ function. The ``fragment()`` function will be explained in more detail in the ne
 
 When we have normals that correspond to a specific vertex we set ``NORMAL``, but if you have a normalmap
 that comes from a texture, set the normal using ``NORMALMAP``. This way Godot will handle wrapping the
-texture around the mesh automatically. 
+texture around the mesh automatically.
 
 Lastly, in order to ensure that we are reading from the same places on the noise texture and the normalmap
 texture, we are going to pass the ``VERTEX.xz`` position from the ``vertex()`` function to the ``fragment()``
-function. We do that with varyings. 
+function. We do that with varyings.
 
 Above the ``vertex()`` define a ``vec2`` called ``vertex_position``. And inside the ``vertex()`` function
 assign ``VERTEX.xz`` to ``vertex_position``.
@@ -318,7 +318,7 @@ We can even drag the light around and the lighting will update automatically.
 .. image:: img/normalmap2.png
 
 Here is the full code for this tutorial. You can see it is not very long as Godot handles
-most of the difficult stuff for you. 
+most of the difficult stuff for you.
 
 .. code-block:: glsl
 
@@ -341,6 +341,6 @@ most of the difficult stuff for you.
   }
 
 That is everything for this part. Hopefully, you now understand the basics of vertex
-shaders in Godot. In the next part of this tutorial we will write a fragment function 
+shaders in Godot. In the next part of this tutorial we will write a fragment function
 to accompany this vertex function and we will cover a more advanced technique to turn
-this terrain into an ocean of moving waves. 
+this terrain into an ocean of moving waves.

+ 35 - 35
tutorials/shading/your_first_shader/your_second_spatial_shader.rst

@@ -23,16 +23,16 @@ As mentioned in the previous part of this tutorial. The standard use of the frag
 in Godot is to set up different material properties and let Godot handle the rest. In order
 to provide even more flexibility, Godot also provides things called render modes. Render
 modes are set at the top of the shader, directly below ``shader_type``, and they specify
-what sort of functionality you want the built-in aspects of the shader to have. 
+what sort of functionality you want the built-in aspects of the shader to have.
 
-For example, if you do not want to have lights affect an object, set the render mode to 
+For example, if you do not want to have lights affect an object, set the render mode to
 ``unshaded``:
 
 .. code-block:: glsl
 
   render_mode unshaded;
 
-You can also stack multiple render modes together. For example, if you want to use toon 
+You can also stack multiple render modes together. For example, if you want to use toon
 shading instead of more-realistic PBR shading, set the diffuse mode and specular mode to toon:
 
 .. code-block:: glsl
@@ -44,8 +44,8 @@ only a few parameters.
 
 For a full list of render modes see the :ref:`Spatial shader reference <doc_spatial_shader>`.
 
-In this part of the tutorial, we will walk through how to take the bumpy terrain from the 
-previous part and turn it into an ocean.  
+In this part of the tutorial, we will walk through how to take the bumpy terrain from the
+previous part and turn it into an ocean.
 
 First let's set the color of the water. We do that by setting ``ALBEDO``.
 
@@ -67,13 +67,13 @@ come from reflections from the sky.
 The PBR model that Godot uses relies on two main parameters: ``METALLIC`` and ``ROUGHNESS``.
 
 ``ROUGHNESS`` specifies how smooth/rough the surface of a material is. A low ``ROUGHNESS`` will
-make a material appear like a shiny plastic, while a high roughness makes the material appear 
+make a material appear like a shiny plastic, while a high roughness makes the material appear
 more solid in color.
 
 ``METALLIC`` specifies how much like a metal the object is. It is better set close to ``0`` or ``1``.
-Think of ``METALLIC`` as changing the balance between the reflection and the ``ALBEDO`` color. A 
+Think of ``METALLIC`` as changing the balance between the reflection and the ``ALBEDO`` color. A
 high ``METALLIC`` almost ignores ``ALBEDO`` altogether, and looks like a mirror of the sky. While
-a low ``METALLIC`` has a more equal representation of sky color and ``ALBEDO`` color. 
+a low ``METALLIC`` has a more equal representation of sky color and ``ALBEDO`` color.
 
 ``ROUGHNESS`` increases from ``0`` to ``1`` from left to right while ``METALLIC`` increase from
 ``0`` to ``1`` from top to bottom.
@@ -96,15 +96,15 @@ reflective, so we will set its ``ROUGHNESS`` property to be quite low as well.
 
 .. image:: img/plastic.png
 
-Now we have a smooth plastic looking surface. It is time to think about some particular properties of 
-water that we want to emulate. There are two main ones that will take this from a weird plastic surface 
+Now we have a smooth plastic looking surface. It is time to think about some particular properties of
+water that we want to emulate. There are two main ones that will take this from a weird plastic surface
 to nice stylized water. The first is specular reflections. Specular reflections are those bright spots
 you see from where the sun reflects directly into your eye. The second is fresnel reflectance.
-Fresnel reflectance is the property of objects to become more reflective at shallow angles. It is the 
+Fresnel reflectance is the property of objects to become more reflective at shallow angles. It is the
 reason why you can see into water below you, but farther away it reflects the sky.
 
-In order to increase the specular reflections, we will do two things. First, we will change the render 
-mode for specular to toon because the toon render mode has larger specular highlights. 
+In order to increase the specular reflections, we will do two things. First, we will change the render
+mode for specular to toon because the toon render mode has larger specular highlights.
 
 .. code-block:: glsl
 
@@ -114,13 +114,13 @@ mode for specular to toon because the toon render mode has larger specular highl
 
 Second, we will
 add rim lighting. Rim lighting increases the effect of light at glancing angles. Usually it is used
-to emulate the way light passes through fabric on the edges of an object, but we will use it here to 
+to emulate the way light passes through fabric on the edges of an object, but we will use it here to
 help achieve a nice watery effect.
 
 .. code-block:: glsl
 
   void fragment() {
-    RIM = 0.2;  
+    RIM = 0.2;
     METALLIC = 0.0;
     ROUGHNESS = 0.01;
     ALBEDO = vec3(0.1, 0.3, 0.5);
@@ -140,7 +140,7 @@ when you are looking at the surface head-on or at a glancing angle.
 
   float fresnel = sqrt(1.0 - dot(NORMAL, VIEW));
 
-And mix it into both ``ROUGHNESS`` and ``ALBEDO``. This is the benefit of ShaderMaterials over 
+And mix it into both ``ROUGHNESS`` and ``ALBEDO``. This is the benefit of ShaderMaterials over
 StandardMaterial3Ds. With StandardMaterial3D, we could set these properties with a texture, or to a flat
 number. But with shaders we can set them based on any mathematical function that we can dream up.
 
@@ -149,7 +149,7 @@ number. But with shaders we can set them based on any mathematical function that
 
   void fragment() {
     float fresnel = sqrt(1.0 - dot(NORMAL, VIEW));
-    RIM = 0.2;  
+    RIM = 0.2;
     METALLIC = 0.0;
     ROUGHNESS = 0.01 * (1.0 - fresnel);
     ALBEDO = vec3(0.1, 0.3, 0.5) + (0.1 * fresnel);
@@ -158,8 +158,8 @@ number. But with shaders we can set them based on any mathematical function that
 .. image:: img/fresnel.png
 
 And now, with only 5 lines of code, you can have complex looking water. Now that we have
-lighting, this water is looking too bright. Let's darken it. This is done easily by 
-decreasing the values of the ``vec3`` we pass into ``ALBEDO``. Let's set them to 
+lighting, this water is looking too bright. Let's darken it. This is done easily by
+decreasing the values of the ``vec3`` we pass into ``ALBEDO``. Let's set them to
 ``vec3(0.01, 0.03, 0.05)``.
 
 .. image:: img/dark-water.png
@@ -169,10 +169,10 @@ Animating with ``TIME``
 
 Going back to the vertex function, we can animate the waves using the built-in variable ``TIME``.
 
-``TIME`` is a built-in variable that is accessible from the vertex and fragment functions. 
+``TIME`` is a built-in variable that is accessible from the vertex and fragment functions.
 
 
-In the last tutorial we calculated height by reading from a heightmap. For this tutorial, 
+In the last tutorial we calculated height by reading from a heightmap. For this tutorial,
 we will do the same. Put the heightmap code in a function called ``height()``.
 
 .. code-block:: glsl
@@ -181,7 +181,7 @@ we will do the same. Put the heightmap code in a function called ``height()``.
     return texture(noise, position / 10.0).x; // Scaling factor is based on mesh size (this PlaneMesh is 10×10).
   }
 
-In order to use ``TIME`` in the ``height()`` function, we need to pass it in. 
+In order to use ``TIME`` in the ``height()`` function, we need to pass it in.
 
 .. code-block:: glsl
 
@@ -198,7 +198,7 @@ And make sure to correctly pass it in inside the vertex function.
     VERTEX.y = k;
   }
 
-Instead of using a normalmap to calculate normals. We are going to compute them manually in the 
+Instead of using a normalmap to calculate normals, we are going to compute them manually in the
 ``vertex()`` function. To do so use the following line of code.
 
 .. code-block:: glsl
@@ -229,7 +229,7 @@ What makes shaders so powerful is that you can achieve complex effects by using
 this, we are going to take our waves to the next level by modifying the ``height()`` function and
 by introducing a new function called ``wave()``.
 
-``wave()`` has one parameter, ``position``, which is the same as it is in ``height()``. 
+``wave()`` has one parameter, ``position``, which is the same as it is in ``height()``.
 
 We are going to call ``wave()`` multiple times in ``height()`` in order to fake the way waves look.
 
@@ -256,17 +256,17 @@ they won't be straight lines completely aligned with the grid.
 
 Define a wave-like function using ``sin()`` and ``position``. Normally ``sin()`` waves are very round.
 We use ``abs()`` to give them a sharp ridge and constrain them to the 0-1 range. And then we
-subtract it from ``1.0`` to put the peak on top. 
+subtract it from ``1.0`` to put the peak on top.
 
 .. code-block:: glsl
 
     return pow(1.0 - pow(wv.x * wv.y, 0.65), 4.0);
 
-Multiply the x-directional wave by the y-directional wave and raise it to a power to sharpen the peaks. 
-Then subtract that from ``1.0`` so that the ridges become peaks and raise that to a power to sharpen the 
-ridges. 
+Multiply the x-directional wave by the y-directional wave and raise it to a power to sharpen the peaks.
+Then subtract that from ``1.0`` so that the ridges become peaks and raise that to a power to sharpen the
+ridges.
 
-We can now replace the contents of our ``height()`` function with ``wave()``. 
+We can now replace the contents of our ``height()`` function with ``wave()``.
 
 .. code-block:: glsl
 
@@ -285,7 +285,7 @@ The shape of the sin wave is too obvious. So let's spread the waves out a bit. W
   float height(vec2 position, float time) {
     float h = wave(position*0.4);
   }
-  
+
 Now it looks much better.
 
 .. image:: img/wave2.png
@@ -308,14 +308,14 @@ Here is an example for how you could layer the four waves to achieve nicer looki
   }
 
 Note that we add time to two and subtract it from the other two. This makes the waves move in different directions
-creating a complex effect. Also note that the amplitudes (the number the result is multiplied by) all 
+creating a complex effect. Also note that the amplitudes (the number the result is multiplied by) all
 add up to ``1.0``. This keeps the wave in the 0-1 range.
 
 With this code you should end up with more complex looking waves and all you had
-to do was add a bit of math! 
+to do was add a bit of math!
 
 .. image:: img/wave3.png
 
-For more information about Spatial shaders read the :ref:`Shading Language <doc_shading_language>` 
-doc and the :ref:`Spatial Shaders <doc_spatial_shader>` doc. Also look at more advanced tutorials 
-in the :ref:`Shading section <toc-learn-features-shading>` and the :ref:`3D <toc-learn-features-3d>` sections. 
+For more information about Spatial shaders read the :ref:`Shading Language <doc_shading_language>`
+doc and the :ref:`Spatial Shaders <doc_spatial_shader>` doc. Also look at more advanced tutorials
+in the :ref:`Shading section <toc-learn-features-shading>` and the :ref:`3D <toc-learn-features-3d>` sections.

+ 6 - 6
tutorials/viewports/using_viewport_as_texture.rst

@@ -167,14 +167,14 @@ to make the planet. We will be using this noise function directly from a `Shader
       vec3 i = floor(p);
       vec3 f = fract(p);
       vec3 u = f * f * (3.0 - 2.0 * f);
-      
-      return mix(mix(mix(dot(hash(i + vec3(0.0, 0.0, 0.0)), f - vec3(0.0, 0.0, 0.0)), 
+
+      return mix(mix(mix(dot(hash(i + vec3(0.0, 0.0, 0.0)), f - vec3(0.0, 0.0, 0.0)),
                          dot(hash(i + vec3(1.0, 0.0, 0.0)), f - vec3(1.0, 0.0, 0.0)), u.x),
-                     mix(dot(hash(i + vec3(0.0, 1.0, 0.0)), f - vec3(0.0, 1.0, 0.0)), 
+                     mix(dot(hash(i + vec3(0.0, 1.0, 0.0)), f - vec3(0.0, 1.0, 0.0)),
                          dot(hash(i + vec3(1.0, 1.0, 0.0)), f - vec3(1.0, 1.0, 0.0)), u.x), u.y),
-                 mix(mix(dot(hash(i + vec3(0.0, 0.0, 1.0)), f - vec3(0.0, 0.0, 1.0)), 
+                 mix(mix(dot(hash(i + vec3(0.0, 0.0, 1.0)), f - vec3(0.0, 0.0, 1.0)),
                          dot(hash(i + vec3(1.0, 0.0, 1.0)), f - vec3(1.0, 0.0, 1.0)), u.x),
-                     mix(dot(hash(i + vec3(0.0, 1.0, 1.0)), f - vec3(0.0, 1.0, 1.0)), 
+                     mix(dot(hash(i + vec3(0.0, 1.0, 1.0)), f - vec3(0.0, 1.0, 1.0)),
                          dot(hash(i + vec3(1.0, 1.0, 1.0)), f - vec3(1.0, 1.0, 1.0)), u.x), u.y), u.z );
     }
 
@@ -197,7 +197,7 @@ looks nothing like the planet you were promised. So let's move onto something mo
 Coloring the planet
 -------------------
 
-Now to make the planet colors. While there are many ways to do this, for now, we will stick 
+Now to make the planet colors. While there are many ways to do this, for now, we will stick
 with a gradient between water and land.
 
 To make a gradient in GLSL, we use the ``mix`` function. ``mix`` takes two values to interpolate

+ 1 - 1
tutorials/vr/developing_for_oculus_quest.rst

@@ -35,7 +35,7 @@ project. Your project tree should looks something like this:
 
 Now you can start building the main scene, stick to the bare minimum.
 
-- Add an ARVROrigin node first. 
+- Add an ARVROrigin node first.
 - Then Add three child nodes to the origin node, one ARVRCamera and two ARVRControllers.
 - Assign controller id 1 to the first ARVRController and rename that to LeftHand.
 - Assign controller id 2 to the second ARVRController and rename that to RightHand.

+ 70 - 70
tutorials/vr/vr_starter_tutorial/vr_starter_tutorial_part_one.rst

@@ -30,11 +30,11 @@ Throughout the course of this tutorial, we will cover:
 .. tip:: While this tutorial can be completed by beginners, it is highly
           advised to complete :ref:`doc_your_first_game`,
           if you are new to Godot and/or game development.
-          
+
           **Some experience with making 3D games is required** before going through this tutorial series.
           This tutorial assumes you have experience with the Godot editor, GDScript, and basic 3D game development.
          An OpenVR-ready headset and two OpenVR-ready controllers are required.
-          
+
           This tutorial was written and tested using a Windows Mixed Reality headset and controllers. This project has also been tested on the HTC Vive. Code adjustments may be required
           for other VR Headsets, such as the Oculus Rift.
 
@@ -42,17 +42,17 @@ The Godot project for this tutorial is found on the `OpenVR GitHub repository <h
 section on the GitHub repository. The starter assets contain some 3D models, sounds, scripts, and scenes that are configured for this tutorial.
 
 .. note:: **Credits for the assets provided**:
-          
+
           - The sky panorama was created by `CGTuts <https://cgi.tutsplus.com/articles/freebie-8-awesome-ocean-hdris--cg-5684>`_.
-          
-          - The font used is Titillium-Regular 
+
+          - The font used is Titillium-Regular
           - - The font is licensed under the SIL Open Font License, Version 1.1
-          
-          - The audio used are from several different sources, all downloaded from the Sonniss #GameAudioGDC Bundle (`License PDF <https://sonniss.com/gdc-bundle-license/>`_) 
+
+          - The audio used are from several different sources, all downloaded from the Sonniss #GameAudioGDC Bundle (`License PDF <https://sonniss.com/gdc-bundle-license/>`_)
           - - The folders where the audio files are stored have the same name as folders in the Sonniss audio bundle.
-          
+
           - The OpenVR addon was created by `Bastiaan Olij <https://github.com/BastiaanOlij>`_ and is released under the MIT license. It can be found both on the `Godot Asset Library <https://godotengine.org/asset-library/asset/150>`_ and on `GitHub <https://github.com/GodotVR/godot-openvr-asset>`_. *3rd party code and libraries used in the OpenVR addon may be under a different license.*
-          
+
           - The initial project, 3D models, and scripts were created by `TwistedTwigleg <https://github.com/TwistedTwigleg>`_ and is released under the MIT license.
 
 .. tip:: You can find the finished project on the `OpenVR GitHub repository <https://github.com/GodotVR/godot_openvr_fps>`_.
@@ -98,7 +98,7 @@ controller relative to the :ref:`ARVROrigin <class_ARVROrigin>` node. All of the
 An :ref:`ARVRController <class_ARVRController>` node with an ``ID`` of ``1`` represents the left VR controller, while an :ref:`ARVRController <class_ARVRController>` controller with an
 ``ID`` of ``2`` represents the right VR controller.
 
-To summerize: 
+To summarize:
 
 - The :ref:`ARVROrigin <class_ARVROrigin>` node is the center of the VR tracking system and is positioned on the floor.
 
@@ -243,7 +243,7 @@ Select the root node of the scene, either ``Right_Controller`` or ``Left_Control
 the same script, so it doesn't matter which you use first. With ``VR_Controller.gd`` opened, add the following code:
 
 .. tip:: You can copy and paste the code from this page directly into the script editor.
-         
+
          If you do this, all the code copied will be using spaces instead of tabs.
 
          To convert the spaces to tabs in the script editor, click the ``Edit`` menu and select ``Convert Indent To Tabs``.
@@ -262,7 +262,7 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
 
     var grab_area
     var grab_raycast
-    
+
     var grab_mode = "AREA"
     var grab_pos_node
 
@@ -292,26 +292,26 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
         # warning-ignore-all:return_value_discarded
 
         teleport_raycast = get_node("RayCast")
-        
+
         teleport_mesh = get_tree().root.get_node("Game/Teleport_Mesh")
-        
+
         teleport_button_down = false
         teleport_mesh.visible = false
         teleport_raycast.visible = false
-        
+
         grab_area = get_node("Area")
         grab_raycast = get_node("Grab_Cast")
         grab_pos_node = get_node("Grab_Pos")
-        
+
         grab_mode = "AREA"
         grab_raycast.visible = false
-        
+
         get_node("Sleep_Area").connect("body_entered", self, "sleep_area_entered")
         get_node("Sleep_Area").connect("body_exited", self, "sleep_area_exited")
-        
+
         hand_mesh = get_node("Hand")
         hand_pickup_drop_sound = get_node("AudioStreamPlayer3D")
-        
+
         connect("button_pressed", self, "button_pressed")
         connect("button_release", self, "button_released")
 
@@ -321,7 +321,7 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
             rumble -= delta * CONTROLLER_RUMBLE_FADE_SPEED
             if rumble < 0:
                 rumble = 0
-        
+
         if teleport_button_down == true:
             teleport_raycast.force_raycast_update()
             if teleport_raycast.is_colliding():
@@ -329,16 +329,16 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
                     if teleport_raycast.get_collision_normal().y >= 0.85:
                         teleport_pos = teleport_raycast.get_collision_point()
                         teleport_mesh.global_transform.origin = teleport_pos
-        
-        
+
+
         if get_is_active() == true:
             _physics_process_update_controller_velocity(delta)
-        
+
         if held_object != null:
             var held_scale = held_object.scale
             held_object.global_transform = grab_pos_node.global_transform
             held_object.scale = held_scale
-        
+
         _physics_process_directional_movement(delta);
 
 
@@ -348,19 +348,19 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
         if prior_controller_velocities.size() > 0:
             for vel in prior_controller_velocities:
                 controller_velocity += vel
-            
+
             controller_velocity = controller_velocity / prior_controller_velocities.size()
-        
+
         var relative_controller_position = (global_transform.origin - prior_controller_position)
-        
+
         controller_velocity += relative_controller_position
-        
+
         prior_controller_velocities.append(relative_controller_position)
-        
+
         prior_controller_position = global_transform.origin
-        
+
         controller_velocity /= delta;
-        
+
         if prior_controller_velocities.size() > 30:
             prior_controller_velocities.remove(0)
 
@@ -368,30 +368,30 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
     func _physics_process_directional_movement(delta):
         var trackpad_vector = Vector2(-get_joystick_axis(1), get_joystick_axis(0))
         var joystick_vector = Vector2(-get_joystick_axis(5), get_joystick_axis(4))
-        
+
         if trackpad_vector.length() < CONTROLLER_DEADZONE:
             trackpad_vector = Vector2(0,0)
         else:
             trackpad_vector = trackpad_vector.normalized() * ((trackpad_vector.length() - CONTROLLER_DEADZONE) / (1 - CONTROLLER_DEADZONE))
-        
+
         if joystick_vector.length() < CONTROLLER_DEADZONE:
             joystick_vector = Vector2(0,0)
         else:
             joystick_vector = joystick_vector.normalized() * ((joystick_vector.length() - CONTROLLER_DEADZONE) / (1 - CONTROLLER_DEADZONE))
-        
+
         var forward_direction = get_parent().get_node("Player_Camera").global_transform.basis.z.normalized()
         var right_direction = get_parent().get_node("Player_Camera").global_transform.basis.x.normalized()
-        
+
         # Because the trackpad and the joystick will both move the player, we can add them together and normalize
         # the result, giving the combined movement direction
         var movement_vector = (trackpad_vector + joystick_vector).normalized()
-        
+
         var movement_forward = forward_direction * movement_vector.x * delta * MOVEMENT_SPEED
         var movement_right = right_direction * movement_vector.y * delta * MOVEMENT_SPEED
-        
+
         movement_forward.y = 0
         movement_right.y = 0
-        
+
         if (movement_right.length() > 0 or movement_forward.length() > 0):
             get_parent().global_translate(movement_right + movement_forward)
             directional_movement = true
@@ -402,10 +402,10 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
     func button_pressed(button_index):
         if button_index == 15:
             _on_button_pressed_trigger()
-        
+
         if button_index == 2:
             _on_button_pressed_grab()
-            
+
         if button_index == 1:
             _on_button_pressed_menu()
 
@@ -424,18 +424,18 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
     func _on_button_pressed_grab():
         if teleport_button_down == true:
             return
-        
+
         if held_object == null:
             _pickup_rigidbody()
         else:
             _throw_rigidbody()
-        
+
         hand_pickup_drop_sound.play()
 
 
     func _pickup_rigidbody():
         var rigid_body = null
-        
+
         if grab_mode == "AREA":
             var bodies = grab_area.get_overlapping_bodies()
             if len(bodies) > 0:
@@ -444,7 +444,7 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
                         if !("NO_PICKUP" in body):
                             rigid_body = body
                             break
-        
+
         elif grab_mode == "RAYCAST":
             grab_raycast.force_raycast_update()
             if (grab_raycast.is_colliding()):
@@ -452,23 +452,23 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
                 if body is RigidBody:
                     if !("NO_PICKUP" in body):
                         rigid_body = body
-        
-        
+
+
         if rigid_body != null:
-            
+
             held_object = rigid_body
-            
+
             held_object_data["mode"] = held_object.mode
             held_object_data["layer"] = held_object.collision_layer
             held_object_data["mask"] = held_object.collision_mask
-            
+
             held_object.mode = RigidBody.MODE_STATIC
             held_object.collision_layer = 0
             held_object.collision_mask = 0
-            
+
             hand_mesh.visible = false
             grab_raycast.visible = false
-            
+
             if held_object is VR_Interactable_Rigidbody:
                 held_object.controller = self
                 held_object.picked_up()
@@ -477,20 +477,20 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
     func _throw_rigidbody():
         if held_object == null:
             return
-        
+
         held_object.mode = held_object_data["mode"]
         held_object.collision_layer = held_object_data["layer"]
         held_object.collision_mask = held_object_data["mask"]
-        
+
         held_object.apply_impulse(Vector3(0, 0, 0), controller_velocity)
-        
+
         if held_object is VR_Interactable_Rigidbody:
             held_object.dropped()
             held_object.controller = null
-        
+
         held_object = null
         hand_mesh.visible = true
-        
+
         if grab_mode == "RAYCAST":
             grab_raycast.visible = true
 
@@ -500,7 +500,7 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
             grab_mode = "RAYCAST"
             if held_object == null:
                 grab_raycast.visible = true
-        
+
         elif grab_mode == "RAYCAST":
             grab_mode = "AREA"
             grab_raycast.visible = false
@@ -513,13 +513,13 @@ the same script, so it doesn't matter which you use first. With ``VR_Controller.
 
     func _on_button_released_trigger():
         if teleport_button_down == true:
-            
+
             if teleport_pos != null and teleport_mesh.visible == true:
                 var camera_offset = get_parent().get_node("Player_Camera").global_transform.origin - get_parent().global_transform.origin
                 camera_offset.y = 0
-                
+
                 get_parent().global_transform.origin = teleport_pos - camera_offset
-            
+
             teleport_button_down = false
             teleport_mesh.visible = false
             teleport_raycast.visible = false
@@ -565,7 +565,7 @@ First, let's go through all the class variables in the script:
 * ``directional_movement``: A variable to hold whether this VR controller is moving the player using the touchpad/joystick.
 
 .. note:: You can find a great article explaining all about how to handle touchpad/joystick dead zones `here <https://web.archive.org/web/20191208161810/http://www.third-helix.com/2013/04/12/doing-thumbstick-dead-zones-right.html>`__.
-          
+
           We are using a translated version of the scaled radial dead zone code provided in that article for the VR controller's joystick/touchpad.
           The article is a great read, and I highly suggest giving it a look!
 
@@ -853,7 +853,7 @@ calls the ``picked_up`` function on ``held_object``. While we haven't made ``VR_
 being held by a VR controller, where a reference to the controller is stored in the ``controller`` variable, through calling the ``picked_up`` function.
 
 .. tip:: Don't worry, we will cover ``VR_Interactable_Rigidbody`` after this section!
-         
+
          The code should make more sense after completing part 2 of this tutorial series, where we will actually be using ``VR_Interactable_Rigidbody``.
 
 What this section of code does is that if a :ref:`RigidBody <class_RigidBody>` was found using the grab :ref:`Area <class_Area>` or :ref:`Raycast <class_Raycast>`, it sets it up so that
@@ -877,7 +877,7 @@ We then check to see if the object held extends a class called ``VR_Interactable
 can do whatever it needs to do when dropped, and we set the ``controller`` variable to ``null`` so that the :ref:`RigidBody <class_RigidBody>` knows that it is not being held.
 
 .. tip:: Don't worry, we will cover ``VR_Interactable_Rigidbody`` after this section!
-         
+
          The code should make more sense after completing part 2 of this tutorial series, where we will actually be using ``VR_Interactable_Rigidbody``.
 
 Regardless of whether ``held_object`` extends ``VR_Interactable_Rigidbody`` or not, we then set ``held_object`` to ``null`` so the VR controller knows it is no longer holding anything.
@@ -977,7 +977,7 @@ Once you have ``VR_Interactable_Rigidbody.gd`` open, add the following code:
  .. code-tab:: gdscript GDScript
     class_name VR_Interactable_Rigidbody
     extends RigidBody
-    
+
     # (Ignore the unused variable warning)
     # warning-ignore:unused_class_variable
     var controller = null
@@ -1068,27 +1068,27 @@ Add the following code:
         yield(get_tree(), "idle_frame")
         yield(get_tree(), "idle_frame")
         yield(get_tree(), "idle_frame")
-        
+
         var interface = ARVRServer.primary_interface
-        
+
         if interface == null:
             set_process(false)
             printerr("Movement_Vignette: no VR interface found!")
             return
-        
+
         rect_size = interface.get_render_targetsize()
         rect_position = Vector2(0,0)
-        
+
         controller_one = get_parent().get_node("Left_Controller")
         controller_two = get_parent().get_node("Right_Controller")
-        
+
         visible = false
 
 
     func _process(_delta):
         if (controller_one == null or controller_two == null):
             return
-        
+
         if (controller_one.directional_movement == true or controller_two.directional_movement == true):
             visible = true
         else:

+ 54 - 54
tutorials/vr/vr_starter_tutorial/vr_starter_tutorial_part_two.rst

@@ -57,21 +57,21 @@ Select the ``Sphere_Target_Root`` node and make a new script called ``Sphere_Tar
     func damage(damage):
         if destroyed == true:
             return
-        
+
         health -= damage
-        
+
         if health <= 0:
-            
+
             get_node("CollisionShape").disabled = true
             get_node("Shpere_Target").visible = false
-            
+
             var clone = RIGID_BODY_TARGET.instance()
             add_child(clone)
             clone.global_transform = global_transform
-            
+
             destroyed = true
             set_physics_process(true)
-            
+
             get_node("AudioStreamPlayer").play()
             get_tree().root.get_node("Game").remove_sphere()
 
@@ -90,7 +90,7 @@ First, let's go through all the class variables in the script:
 * ``RIGID_BODY_TARGET``: A constant to hold the scene of the destroyed sphere target.
 
 .. note:: Feel free to check out the ``RIGID_BODY_TARGET`` scene. It is just a bunch of :ref:`RigidBody <class_RigidBody>` nodes and a broken sphere model.
-          
+
           We'll be instancing this scene so when the target is destroyed, it looks like it broke into a bunch of pieces.
 
 
@@ -107,7 +107,7 @@ do when the target has been destroyed.
 
 First this function adds time, ``delta``, to the ``destroyed_timer`` variable. It then checks to see if ``destroyed_timer`` is greater than or equal to
 ``DESTROY_WAIT_TIME``. If ``destroyed_timer`` is greater than or equal to ``DESTROY_WAIT_TIME``, then the sphere target frees/deletes itself by calling
-the ``queue_free`` function. 
+the ``queue_free`` function.
 
 ``damage`` function step-by-step explanation
 """"""""""""""""""""""""""""""""""""""""""""
@@ -195,7 +195,7 @@ node called ``Pistol`` and make a new script called ``Pistol.gd``. Add the follo
 
 .. tabs::
  .. code-tab:: gdscript GDScript
-    
+
     extends VR_Interactable_Rigidbody
 
     var flash_mesh
@@ -213,10 +213,10 @@ node called ``Pistol`` and make a new script called ``Pistol.gd``. Add the follo
     func _ready():
         flash_mesh = get_node("Pistol_Flash")
         flash_mesh.visible = false
-        
+
         laser_sight_mesh = get_node("LaserSight")
         laser_sight_mesh.visible = false
-        
+
         raycast = get_node("RayCast")
         pistol_fire_sound = get_node("AudioStreamPlayer3D")
 
@@ -230,25 +230,25 @@ node called ``Pistol`` and make a new script called ``Pistol.gd``. Add the follo
 
     func interact():
         if flash_timer <= 0:
-            
+
             flash_timer = FLASH_TIME
             flash_mesh.visible = true
-            
+
             raycast.force_raycast_update()
             if raycast.is_colliding():
-                
+
                 var body = raycast.get_collider()
                 var direction_vector = raycast.global_transform.basis.z.normalized()
                 var raycast_distance = raycast.global_transform.origin.distance_to(raycast.get_collision_point())
-                
+
                 if body.has_method("damage"):
                     body.damage(BULLET_DAMAGE)
                 elif body is RigidBody:
                     var collision_force = (COLLISION_FORCE / raycast_distance) * body.mass
                     body.apply_impulse((raycast.global_transform.origin - body.global_transform.origin).normalized(), direction_vector * collision_force)
-            
+
             pistol_fire_sound.play()
-            
+
             if controller != null:
                 controller.rumble = 0.25
 
@@ -397,10 +397,10 @@ Let's write the code for the shotgun. Select the :ref:`RigidBody <class_RigidBod
     func _ready():
         flash_mesh = get_node("Shotgun_Flash")
         flash_mesh.visible = false
-        
+
         laser_sight_mesh = get_node("LaserSight")
         laser_sight_mesh.visible = false
-        
+
         raycasts = get_node("Raycasts")
         shotgun_fire_sound = get_node("AudioStreamPlayer3D")
 
@@ -414,33 +414,33 @@ Let's write the code for the shotgun. Select the :ref:`RigidBody <class_RigidBod
 
     func interact():
         if flash_timer <= 0:
-            
+
             flash_timer = FLASH_TIME
             flash_mesh.visible = true
-            
+
             for raycast in raycasts.get_children():
-                
+
                 if not raycast is RayCast:
                     continue
-                
+
                 raycast.rotation_degrees = Vector3(90 + rand_range(10, -10), 0, rand_range(10, -10))
-                
+
                 raycast.force_raycast_update()
                 if raycast.is_colliding():
-                    
+
                     var body = raycast.get_collider()
                     var direction_vector = raycasts.global_transform.basis.z.normalized()
                     var raycast_distance = raycasts.global_transform.origin.distance_to(raycast.get_collision_point())
-                    
+
                     if body.has_method("damage"):
                         body.damage(BULLET_DAMAGE)
-                    
+
                     if body is RigidBody:
                         var collision_force = (COLLISION_FORCE / raycast_distance) * body.mass
                         body.apply_impulse((raycast.global_transform.origin - body.global_transform.origin).normalized(), direction_vector * collision_force)
-            
+
             shotgun_fire_sound.play()
-            
+
             if controller != null:
                 controller.rumble = 0.25
 
@@ -585,73 +585,73 @@ Let's write the code for the bomb. Select the ``Bomb`` :ref:`RigidBody <class_Ri
 
 
     func _ready():
-        
+
         bomb_mesh = get_node("Bomb")
         explosion_area = get_node("Area")
         fuse_particles = get_node("Fuse_Particles")
         explosion_particles = get_node("Explosion_Particles")
         explosion_sound = get_node("AudioStreamPlayer3D")
-        
+
         set_physics_process(false)
 
 
     func _physics_process(delta):
-        
+
         if fuse_timer < FUSE_TIME:
-            
+
             fuse_timer += delta
-            
+
             if fuse_timer >= FUSE_TIME:
-                
+
                 fuse_particles.emitting = false
-                
+
                 explosion_particles.one_shot = true
                 explosion_particles.emitting = true
-                
+
                 bomb_mesh.visible = false
-                
+
                 collision_layer = 0
                 collision_mask = 0
                 mode = RigidBody.MODE_STATIC
-                
+
                 for body in explosion_area.get_overlapping_bodies():
                     if body == self:
                         pass
                     else:
                         if body.has_method("damage"):
                             body.damage(EXPLOSION_DAMAGE)
-                        
+
                         if body is RigidBody:
                             var direction_vector = body.global_transform.origin - global_transform.origin
                             var bomb_distance = direction_vector.length()
                             var collision_force = (COLLISION_FORCE / bomb_distance) * body.mass
                             body.apply_impulse(Vector3.ZERO, direction_vector.normalized() * collision_force)
-                
+
                 exploded = true
                 explosion_sound.play()
-        
-        
+
+
         if exploded:
-            
+
             explosion_timer += delta
-            
+
             if explosion_timer >= EXPLOSION_TIME:
-                
+
                 explosion_area.monitoring = false
 
                 if controller != null:
                     controller.held_object = null
                     controller.hand_mesh.visible = true
-                    
+
                     if controller.grab_mode == "RAYCAST":
                         controller.grab_raycast.visible = true
-                
+
                 queue_free()
 
 
     func interact():
         set_physics_process(true)
-        
+
         fuse_particles.emitting = true
 
 
@@ -818,23 +818,23 @@ Add the following code:
 
 
     func _physics_process(_delta):
-        
+
         var collision_results = damage_body.move_and_collide(Vector3.ZERO, true, true, true);
-        
+
         if (collision_results != null):
             if collision_results.collider.has_method("damage"):
                 collision_results.collider.damage(SWORD_DAMAGE)
-            
+
             if collision_results.collider is RigidBody:
                 if controller == null:
                     collision_results.collider.apply_impulse(
-                        collision_results.position, 
+                        collision_results.position,
                         collision_results.normal * linear_velocity * COLLISION_FORCE)
                 else:
                     collision_results.collider.apply_impulse(
                         collision_results.position,
                         collision_results.normal * controller.controller_velocity * COLLISION_FORCE)
-            
+
             sword_noise.play()
 
 Let's go over how this script works!