Implement binding functionality for UI automation

- Regenerate meta files due to a Unity asset authentication error
2026-01-25 01:31:34 +09:00
parent 2ceb28f55d
commit ce83f21c93
1861 changed files with 377882 additions and 211 deletions

File diff suppressed because it is too large.


@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: a3e12f7b92d6e49c0b430ae7813c2398
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: c0f26b555bfc14351a9f8ec342fd6ae8
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,18 @@
{
"name": "Unity.InputSystem.DocCodeSamples",
"rootNamespace": "",
"references": [
"GUID:75469ad4d38634e559750d17036d5f7c"
],
"includePlatforms": [
"Editor"
],
"excludePlatforms": [],
"allowUnsafeCode": false,
"overrideReferences": true,
"precompiledReferences": [],
"autoReferenced": false,
"defineConstraints": [],
"versionDefines": [],
"noEngineReferences": false
}


@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 41b01d3964f844d8b43923c18b3a9a6f
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,53 @@
using UnityEngine;
using UnityEngine.InputSystem;
namespace DocCodeSamples.Tests
{
internal class GamepadExample : MonoBehaviour
{
void Start()
{
// Print all connected gamepads
Debug.Log(string.Join("\n", Gamepad.all));
}
void Update()
{
var gamepad = Gamepad.current;
// No gamepad connected.
if (gamepad == null)
{
return;
}
// Check if "Button North" was pressed this frame
if (gamepad.buttonNorth.wasPressedThisFrame)
{
Debug.Log("Button North was pressed");
}
// Check if the button control is being continuously actuated and read its value
if (gamepad.rightTrigger.IsActuated())
{
Debug.Log("Right trigger value: " + gamepad.rightTrigger.ReadValue());
}
// Read left stick value and perform some code based on the value
Vector2 move = gamepad.leftStick.ReadValue();
// Use the Vector2 move for the game logic here
// Creating haptic feedback while "Button South" is pressed and stopping it when released.
if (gamepad.buttonSouth.wasPressedThisFrame)
{
gamepad.SetMotorSpeeds(0.2f, 1.0f);
}
else if (gamepad.buttonSouth.wasReleasedThisFrame)
{
gamepad.ResetHaptics();
}
}
}
}


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 898672c95e554f2fb492125d78b11af2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,48 @@
using UnityEngine;
using UnityEngine.InputSystem;
namespace DocCodeSamples.Tests
{
internal class GamepadHapticsExample : MonoBehaviour
{
bool hapticsArePaused = false;
void Update()
{
var gamepad = Gamepad.current;
// No gamepad connected, no need to continue.
if (gamepad == null)
return;
float leftTrigger = gamepad.leftTrigger.ReadValue();
float rightTrigger = gamepad.rightTrigger.ReadValue();
// Only set motor speeds if haptics were not paused and a trigger is actuated.
// Trigger values below 0.2f are clamped to 0 so they do not drive the motors.
if (!hapticsArePaused &&
(gamepad.leftTrigger.IsActuated() || gamepad.rightTrigger.IsActuated()))
gamepad.SetMotorSpeeds(
leftTrigger < 0.2f ? 0.0f : leftTrigger,
rightTrigger < 0.2f ? 0.0f : rightTrigger);
// Toggle haptics "playback" when "Button South" is pressed.
// Notice that if you release the triggers after pausing,
// and press the button again, haptics will resume.
if (gamepad.buttonSouth.wasPressedThisFrame)
{
if (hapticsArePaused)
gamepad.ResumeHaptics();
else
gamepad.PauseHaptics();
hapticsArePaused = !hapticsArePaused;
}
// Notice that if you release the triggers after pausing,
// and press the Start button, haptics will be reset.
if (gamepad.startButton.wasPressedThisFrame)
gamepad.ResetHaptics();
}
}
}


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3bbc200178984676a2dcb977a2fe3bae
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,42 @@
#if UNITY_INPUT_SYSTEM_ENABLE_UI
using UnityEngine;
using UnityEngine.InputSystem.UI;
namespace DocCodeSamples.Tests
{
internal class InputSystemUIInputModuleAssignActionsExample : MonoBehaviour
{
// Reference to the InputSystemUIInputModule component, needs to be provided in the Inspector
public InputSystemUIInputModule uiModule;
void Start()
{
// Assign default actions
AssignActions();
}
void AssignActions()
{
if (uiModule != null)
uiModule.AssignDefaultActions();
else
Debug.LogError("InputSystemUIInputModule not found.");
}
void UnassignActions()
{
if (uiModule != null)
uiModule.UnassignActions();
else
Debug.LogError("InputSystemUIInputModule not found.");
}
void OnDestroy()
{
// Unassign actions when the object is destroyed
UnassignActions();
}
}
}
#endif


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 026e1117180341c1bf7847a2cc61f75b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,118 @@
---
uid: input-system-action-assets
---
# Input action assets
An input action asset is an asset which contains a set of [input action](xref:input-system-actions) definitions and their associated [Bindings](xref:input-system-action-bindings) and [control schemes](xref:input-system-action-bindings#control-schemes). These assets have the `.inputactions` file extension and are stored in a plain JSON format.
The input system creates an action asset when you set up the [default project-wide actions](xref:project-wide-actions), but you can also create new action assets directly in the Project window.
For most common scenarios, you do not need to use more than one input action asset. It is usually simpler to configure your project-wide action definition in the Project Settings window.
## Creating input action assets
To create an asset that contains [input actions](xref:input-system-actions) in Unity, right-click in the __Project__ window or go to __Assets > Create > Input Actions__ from Unity's main menu.
## Editing input action assets
To open the Input Actions Editor, double-click an `.inputactions` asset in the Project Browser, or select the __Edit Asset__ button in the Inspector for that asset. You can have more than one editor window open at the same time, but not for the same asset.
This Input Actions Editor is identical to the one that opens in the [Project Settings window](xref:input-system-configuring-input).
## Using input action assets
## Type-safe C# API generation
Input action assets allow you to **generate a C# class** from your action definitions, so you can refer to your actions in a type-safe manner from code. This means you can avoid looking up your actions by string.
### Auto-generating script code for actions
One of the most convenient ways to work with `.inputactions` assets in scripts is to automatically generate a C# wrapper class for them. This provides an easier way to set up callbacks and avoid manually looking up actions and action maps by name.
To do this, enable the __Generate C# Class__ property in the input action asset's Inspector, then select __Apply__.
![The input action asset's Inspector window displays the enabled Generate C# Class property with default values for the C# class's file, name, and namespace settings.](Images/FireActionInputAssetInspector.png)
You can optionally choose a path name, class name, and namespace for the generated script, or keep the default values.
This generates a C# script that simplifies working with the asset.
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;
// IGameplayActions is an interface generated from the newly added "gameplay"
// action map, triggered by the "Generate Interfaces" checkbox. Note that if
// you change the default values for the action map, the name of the interface
// will be different.
public class MyPlayerScript : MonoBehaviour, IGameplayActions
{
// MyPlayerControls is the C# class that Unity generated.
// It encapsulates the data from the .inputactions asset we created
// and automatically looks up all the maps and actions for us.
MyPlayerControls controls;
public void OnEnable()
{
if (controls == null)
{
controls = new MyPlayerControls();
// Tell the "gameplay" action map that we want to be
// notified when actions get triggered.
controls.gameplay.SetCallbacks(this);
}
controls.gameplay.Enable();
}
public void OnDisable()
{
controls.gameplay.Disable();
}
public void OnUse(InputAction.CallbackContext context)
{
// 'Use' code here.
}
public void OnMove(InputAction.CallbackContext context)
{
// 'Move' code here.
}
}
```
> [!NOTE]
> To regenerate the .cs file, right-click the .inputactions asset in the Project window and choose "Reimport".
### Using action assets with `PlayerInput`
The [Player Input](xref:input-system-player-input) component provides a convenient way to handle input for one or multiple players. You can assign your action asset to the Player Input component so that it can then automatically handle activating action maps and selecting control schemes for you.
![The PlayerInput component appears with Player set as the Default Map and the Behavior set to Invoke Unity Events.](Images/PlayerInput.png)
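For illustration, here is a minimal sketch (not part of the manual) of reading an action through a `PlayerInput` component that has an action asset assigned. The action name "fire" is an assumption; use a name that exists in your own asset.
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;

public class FireHandler : MonoBehaviour
{
    PlayerInput playerInput;

    void Awake()
    {
        // PlayerInput with the action asset assigned sits on the same GameObject.
        playerInput = GetComponent<PlayerInput>();
    }

    void Update()
    {
        // "fire" is a hypothetical action name defined in the assigned asset.
        if (playerInput.actions["fire"].WasPressedThisFrame())
            Debug.Log("Fire!");
    }
}
```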
### Modifying input action assets at runtime
There are several ways to modify an input action asset at runtime. Any modifications that you make during Play mode to an input action asset do not persist in the asset after you exit Play mode. This means you can test your application in a realistic way in the Editor without having to worry about inadvertently modifying the asset. For examples on how to modify an input action asset, refer to [Create actions in code](xref:input-system-actions#create-actions-in-code) and [Change Bindings](xref:input-system-action-bindings#change-bindings).
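As a small sketch of such a runtime-only modification (the field and action names are assumptions, not taken from the manual), adding a binding in `Start` affects only the current Play mode session:
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;

public class RuntimeBindingTweak : MonoBehaviour
{
    // Assign your .inputactions asset in the Inspector.
    public InputActionAsset actions;

    void Start()
    {
        // This binding exists only while in Play mode; the asset on disk is unchanged.
        var jump = actions.FindAction("Jump");
        if (jump != null)
            jump.AddBinding("<Keyboard>/space");
    }
}
```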
### The default actions asset
> [!NOTE]
> The default actions asset is entirely separate from the [default project-wide actions](xref:project-wide-actions). It is a legacy asset that is included in the package for backwards compatibility.
The Input System package provides an asset called `DefaultInputActions.inputactions` which you can reference directly in your projects like any other Unity asset. The asset is also available in code form through the [`DefaultInputActions`](xref:UnityEngine.InputSystem.DefaultInputActions) class.
```CSharp
void Start()
{
// Create an instance of the default actions.
var actions = new DefaultInputActions();
actions.Player.Look.performed += OnLook;
actions.Player.Move.performed += OnMove;
actions.Enable();
}
```


@@ -0,0 +1,952 @@
---
uid: input-system-action-bindings
---
# Input bindings
An [`InputBinding`](xref:UnityEngine.InputSystem.InputBinding) represents a connection between an [action](xref:input-system-actions) and one or more [controls](xref:input-system-controls) identified by a [control path](xref:input-system-controls#control-paths). For example, the right trigger of a gamepad (a control) might be bound to an action named "accelerate", so that pulling the right trigger causes a car to accelerate in your game.
You can add multiple bindings to an action, which is generally useful for supporting multiple types of input device. For example, in the default set of actions, the "Move" action has a binding to the left gamepad stick and the WASD keys, which means input through any of these bindings will perform the action.
You can also bind multiple controls from the same device to an action. For example, both the left and right trigger of a gamepad could be mapped to the same action, so that pulling either trigger has the same result in your game.
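For illustration, a minimal sketch (not from the manual) that binds both triggers to a single hypothetical "accelerate" action created in code:
```CSharp
var accelerate = new InputAction("accelerate", type: InputActionType.Value);

// Two bindings on the same action: pulling either trigger performs it.
accelerate.AddBinding("<Gamepad>/leftTrigger");
accelerate.AddBinding("<Gamepad>/rightTrigger");

accelerate.Enable();
```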
You can also set up [Composite](#composite-bindings) bindings, which don't bind to the controls themselves, but receive their input from **Part Bindings** and then return a value representing a composition of those inputs. For example, the right trigger on the gamepad can act as a strength multiplier on the value of the left stick.
## InputBinding API access
Each `InputBinding` has the following properties:
|Property|Description|
|--------|-----------|
|[`path`](xref:UnityEngine.InputSystem.InputBinding.path)|[Control path](xref:input-system-controls#control-paths) that identifies the control(s) from which the action should receive input.<br><br>Example: `"<Gamepad>/leftStick"`|
|[`overridePath`](xref:UnityEngine.InputSystem.InputBinding.overridePath)|[Control path](xref:input-system-controls#control-paths) that overrides `path`. Unlike `path`, `overridePath` is not persistent, so you can use it to non-destructively override the path on a binding. If it is set to something other than null, it takes effect and overrides `path`. To get the path which is currently in effect (that is, either `path` or `overridePath`), you can query the [`effectivePath`](xref:UnityEngine.InputSystem.InputBinding.effectivePath) property.|
|[`action`](xref:UnityEngine.InputSystem.InputBinding.action)|The name or ID of the action that the binding should trigger. Note that this can be null or empty (for instance, for [composites](#composite-bindings)). Not case-sensitive.<br><br>Example: `"fire"`|
|[`groups`](xref:UnityEngine.InputSystem.InputBinding.groups)|A semicolon-separated list of binding groups that the binding belongs to. Can be null or empty. Binding groups can be anything, but are mostly used for [control schemes](#control-schemes). Not case-sensitive.<br><br>Example: `"Keyboard&Mouse;Gamepad"`|
|[`interactions`](xref:UnityEngine.InputSystem.InputBinding.interactions)|A semicolon-separated list of [Interactions](xref:input-system-interactions) to apply to input on this binding. Note that Unity appends Interactions applied to the [action](xref:input-system-actions) itself (if any) to this list. Not case-sensitive.<br><br>Example: `"slowTap;hold(duration=0.75)"`|
|[`processors`](xref:UnityEngine.InputSystem.InputBinding.processors)|A semicolon-separated list of [Processors](UsingProcessors.md) to apply to input on this binding. Note that Unity appends Processors applied to the [action](xref:input-system-actions) itself (if any) to this list. Not case-sensitive.<br><br>Processors on bindings apply in addition to Processors on controls that are providing values. For example, if you put a `stickDeadzone` Processor on a binding and then bind it to `<Gamepad>/leftStick`, you get deadzones applied twice: once from the deadzone Processor sitting on the `leftStick` control, and once from the binding.<br><br>Example: `"invert;axisDeadzone(min=0.1,max=0.95)"`|
|[`id`](xref:UnityEngine.InputSystem.InputBinding.id)|Unique ID of the binding. You can use it to identify the binding when storing binding overrides in user settings, for example.|
|[`name`](xref:UnityEngine.InputSystem.InputBinding.name)|Optional name of the binding. Identifies part names inside [Composites](#composite-bindings).<br><br>Example: `"Positive"`|
|[`isComposite`](xref:UnityEngine.InputSystem.InputBinding.isComposite)|Whether the binding acts as a [Composite](#composite-bindings).|
|[`isPartOfComposite`](xref:UnityEngine.InputSystem.InputBinding.isPartOfComposite)|Whether the binding is part of a [Composite](#composite-bindings).|
To query the bindings for a specific action, use [`InputAction.bindings`](xref:UnityEngine.InputSystem.InputAction.bindings).
To query a flat list of bindings for all actions in an action map, use [`InputActionMap.bindings`](xref:UnityEngine.InputSystem.InputActionMap.bindings).
## Composite bindings
You might want to have several controls act in unison to mimic a different type of control. The most common example of this is using the W, A, S, and D keys on the keyboard to form a 2D vector control equivalent to mouse deltas or gamepad sticks. Another example is to use two keys to form a 1D axis equivalent to a mouse scroll axis.
This is difficult to implement with normal bindings. You can bind a [`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl) to an action expecting a `Vector2`, but doing so results in an exception at runtime when the Input System tries to read a `Vector2` from a control that can deliver only a `float`.
Composite bindings (that is, bindings that are made up of other bindings) solve this problem. Composites themselves don't bind directly to controls; instead, they source values from other bindings that do, and then synthesize input on the fly from those values.
To see how to create Composites in the editor UI, refer to [Editing Composite Bindings](xref:input-system-configuring-input#edit-composite-bindings).
To create composites in code, use the [`AddCompositeBinding`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.AddCompositeBinding(UnityEngine.InputSystem.InputAction,System.String,System.String,System.String)) method:
```CSharp
myAction.AddCompositeBinding("Axis")
.With("Positive", "<Gamepad>/rightTrigger")
.With("Negative", "<Gamepad>/leftTrigger");
```
Each Composite consists of one binding that has [`InputBinding.isComposite`](xref:UnityEngine.InputSystem.InputBinding.isComposite) set to true, followed by one or more bindings that have [`InputBinding.isPartOfComposite`](xref:UnityEngine.InputSystem.InputBinding.isPartOfComposite) set to true. In other words, several consecutive entries in [`InputActionMap.bindings`](xref:UnityEngine.InputSystem.InputActionMap.bindings) or [`InputAction.bindings`](xref:UnityEngine.InputSystem.InputAction.bindings) together form a Composite.
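As a small illustration (not from the manual) of how those two flags group consecutive binding entries, the following walks an action's bindings and prints each composite together with its parts:
```CSharp
var bindings = myAction.bindings;
for (var i = 0; i < bindings.Count; ++i)
{
    if (!bindings[i].isComposite)
        continue;

    Debug.Log($"Composite: {bindings[i].name}");

    // The part bindings follow the composite entry directly.
    for (var j = i + 1; j < bindings.Count && bindings[j].isPartOfComposite; ++j)
        Debug.Log($"  {bindings[j].name} -> {bindings[j].path}");
}
```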
Note that each composite part can be bound arbitrarily many times.
```CSharp
// Make both shoulders and triggers pull on the axis.
myAction.AddCompositeBinding("Axis")
.With("Positive", "<Gamepad>/rightTrigger")
.With("Positive", "<Gamepad>/rightShoulder")
.With("Negative", "<Gamepad>/leftTrigger");
.With("Negative", "<Gamepad>/leftShoulder");
```
Composites can have parameters, just like [Interactions](xref:input-system-interactions) and [Processors](UsingProcessors.md).
```CSharp
myAction.AddCompositeBinding("Axis(whichSideWins=1)");
```
There are currently five Composite types that come with the system out of the box:
- [1D-Axis](#1d-axis): two buttons that pull a 1D axis in the negative and positive direction.
- [2D-Vector](#2d-vector): represents a 4-way button setup where each button represents a cardinal direction, for example a WASD keyboard input (up-down-left-right controls).
- [3D-Vector](#3d-vector): represents a 6-way button setup where pairs of buttons each control one axis of a 3D vector.
- [One Modifier](#one-modifier): requires the user to hold down a "modifier" button in addition to another control, for example, "SHIFT+1".
- [Two Modifiers](#two-modifiers): requires the user to hold down two "modifier" buttons in addition to another control, for example, "SHIFT+CTRL+1".
You can also [add your own](#writing-custom-composites) types of Composites.
### 1D Axis
![The Add Positive/Negative binding property is selected for the "fire" action on the Actions panel.](Images/Add1DAxisComposite.png){width="486" height="133"}
![The 1D Axis Composite binding appears under the "fire" action on the Actions panel.](Images/1DAxisComposite.png){width="486" height="142"}
The 1D Axis Composite is made of two buttons: one that pulls a 1D axis in its negative direction, and another that pulls it in its positive direction, using the [`AxisComposite`](xref:UnityEngine.InputSystem.Composites.AxisComposite) class to compute a `float`.
```CSharp
myAction.AddCompositeBinding("1DAxis") // Or just "Axis"
.With("Positive", "<Gamepad>/rightTrigger")
.With("Negative", "<Gamepad>/leftTrigger");
```
The axis Composite has two Part Bindings:
|Part Binding|Type|Description|
|----|----|-----------|
|[`positive`](xref:UnityEngine.InputSystem.Composites.AxisComposite.positive)|`Button`|Controls pulling in the positive direction (towards [`maxValue`](xref:UnityEngine.InputSystem.Composites.AxisComposite.maxValue)).|
|[`negative`](xref:UnityEngine.InputSystem.Composites.AxisComposite.negative)|`Button`|Controls pulling in the negative direction, (towards [`minValue`](xref:UnityEngine.InputSystem.Composites.AxisComposite.minValue)).|
You can set the following parameters on an axis Composite:
|Parameter|Description|
|---------|-----------|
|[`whichSideWins`](xref:UnityEngine.InputSystem.Composites.AxisComposite.whichSideWins)|What happens if both [`positive`](xref:UnityEngine.InputSystem.Composites.AxisComposite.positive) and [`negative`](xref:UnityEngine.InputSystem.Composites.AxisComposite.negative) are actuated. See table below.|
|[`minValue`](xref:UnityEngine.InputSystem.Composites.AxisComposite.minValue)|The value returned if the [`negative`](xref:UnityEngine.InputSystem.Composites.AxisComposite.negative) side is actuated. Default is -1.|
|[`maxValue`](xref:UnityEngine.InputSystem.Composites.AxisComposite.maxValue)|The value returned if the [`positive`](xref:UnityEngine.InputSystem.Composites.AxisComposite.positive) side is actuated. Default is 1.|
If controls from both the `positive` and the `negative` side are actuated, then the resulting value of the axis Composite depends on the `whichSideWins` parameter setting.
| [`WhichSideWins`](xref:UnityEngine.InputSystem.Composites.AxisComposite.WhichSideWins) | Description |
| ------------------------------------------------------------ | ------------------------------------------------------------ |
| (0) `Neither` | Neither side has precedence. The Composite returns the [`midpoint`](xref:UnityEngine.InputSystem.Composites.AxisComposite.midPoint) between `minValue` and `maxValue` as a result. At their default settings, this is 0.<br><br>This is the default value for this setting. |
| (1) `Positive` | The positive side has precedence and the Composite returns `maxValue`. |
| (2) `Negative` | The negative side has precedence and the Composite returns `minValue`. |
> [!NOTE]
> There is no support yet for interpolating between the positive and negative over time.
### 2D Vector
![The Add Up/Down/Left/Right Composite binding is selected for the "Move" action on the Actions panel.](Images/Add2DVectorComposite.png){width="486" height="199"}
![The WASD part bindings appear under the "Move" action on the Actions panel.](Images/2DVectorComposite.png){width="486" height="178"}
A 2D Vector Composite represents a 4-way button setup like the D-pad on gamepads, where each button represents a cardinal direction. This type of Composite binding uses the [`Vector2Composite`](xref:UnityEngine.InputSystem.Composites.Vector2Composite) class to compute a `Vector2`.
This is very useful for representing up-down-left-right controls, such as WASD keyboard input.
```CSharp
myAction.AddCompositeBinding("2DVector") // Or "Dpad"
.With("Up", "<Keyboard>/w")
.With("Down", "<Keyboard>/s")
.With("Left", "<Keyboard>/a")
.With("Right", "<Keyboard>/d");
// To set mode (2=analog, 1=digital, 0=digitalNormalized):
myAction.AddCompositeBinding("2DVector(mode=2)")
.With("Up", "<Gamepad>/leftStick/up")
.With("Down", "<Gamepad>/leftStick/down")
.With("Left", "<Gamepad>/leftStick/left")
.With("Right", "<Gamepad>/leftStick/right");
```
The 2D Vector Composite has four Part Bindings.
|Part Binding|Type|Description|
|----|----|-----------|
|[`up`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.up)|`Button`|Controls representing `(0,1)` (+Y).|
|[`down`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.down)|`Button`|Controls representing `(0,-1)` (-Y).|
|[`left`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.left)|`Button`|Controls representing `(-1,0)` (-X).|
|[`right`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.right)|`Button`|Controls representing `(1,0)` (+X).|
In addition, you can set this parameter on a 2D Vector Composite:
|Parameter|Description|
|---------|-----------|
|[`mode`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.mode)|Whether to treat the inputs as digital or as analog controls.<br><br>If this is set to [`Mode.DigitalNormalized`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.Mode.DigitalNormalized), inputs are treated as buttons (off if below [`defaultButtonPressPoint`](xref:UnityEngine.InputSystem.InputSettings.defaultButtonPressPoint) and on if equal to or greater). Each input is 0 or 1 depending on whether the button is pressed or not. The vector resulting from the up/down/left/right parts is normalized. The result is a diamond-shaped 2D input range.<br><br>If this is set to [`Mode.Digital`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.Mode.Digital), the behavior is essentially the same as [`Mode.DigitalNormalized`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.Mode.DigitalNormalized) except that the resulting vector is not normalized.<br><br>Finally, if this is set to [`Mode.Analog`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.Mode.Analog), inputs are treated as analog (i.e. full floating-point values) and, other than [`down`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.down) and [`left`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.left) being inverted, values will be passed through as is.<br><br>The default is [`Mode.DigitalNormalized`](xref:UnityEngine.InputSystem.Composites.Vector2Composite.Mode.DigitalNormalized).|
> [!NOTE]
> There is no support yet for interpolating between the up/down/left/right over time.
### 3D Vector
![The Add Up/Down/Left/Right/Forward/Backward Composite binding is selected for the "position" action on the Actions panel.](Images/Add3DVectorComposite.png){width="486" height="150"}
![The 3D Vector part bindings appear under the "position" action on the Actions panel.](Images/3DVectorComposite.png){width="486" height="259"}
A 3D Vector Composite represents a 6-way button setup where pairs of buttons each control one axis of a 3D vector. This type of Composite binding uses the [`Vector3Composite`](xref:UnityEngine.InputSystem.Composites.Vector3Composite) class to compute a `Vector3`.
```CSharp
myAction.AddCompositeBinding("3DVector")
.With("Up", "<Keyboard>/w")
.With("Down", "<Keyboard>/s")
.With("Left", "<Keyboard>/a")
.With("Right", "<Keyboard>/d");
// To set mode (2=analog, 1=digital, 0=digitalNormalized):
myAction.AddCompositeBinding("3DVector(mode=2)")
.With("Up", "<Gamepad>/leftStick/up")
.With("Down", "<Gamepad>/leftStick/down")
.With("Left", "<Gamepad>/leftStick/left")
.With("Right", "<Gamepad>/leftStick/right");
```
The 3D Vector Composite has six Part Bindings.
|Part Binding|Type|Description|
|----|----|-----------|
|[`up`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.up)|`Button`|Controls representing `(0,1,0)` (+Y).|
|[`down`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.down)|`Button`|Controls representing `(0,-1,0)` (-Y).|
|[`left`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.left)|`Button`|Controls representing `(-1,0,0)` (-X).|
|[`right`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.right)|`Button`|Controls representing `(1,0,0)` (+X).|
|[`forward`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.forward)|`Button`|Controls representing `(0,0,1)` (+Z).|
|[`backward`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.backward)|`Button`|Controls representing `(0,0,-1)` (-Z).|
In addition, you can set the following parameters on a 3D vector Composite:
|Parameter|Description|
|---------|-----------|
|[`mode`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.mode)|Whether to treat the inputs as digital or as analog controls.<br><br>If this is set to [`Mode.DigitalNormalized`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.Mode.DigitalNormalized), inputs are treated as buttons (off if below [`defaultButtonPressPoint`](xref:UnityEngine.InputSystem.InputSettings.defaultButtonPressPoint) and on if equal to or greater). Each input is 0 or 1 depending on whether the button is pressed or not. The vector resulting from the up/down/left/right/forward/backward parts is normalized.<br><br>If this is set to [`Mode.Digital`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.Mode.Digital), the behavior is essentially the same as [`Mode.DigitalNormalized`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.Mode.DigitalNormalized) except that the resulting vector is not normalized.<br><br>Finally, if this is set to [`Mode.Analog`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.Mode.Analog), inputs are treated as analog (that is, full floating-point values) and, other than [`down`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.down), [`left`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.left), and [`backward`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.backward) being inverted, values will be passed through as they are.<br><br>The default is [`Analog`](xref:UnityEngine.InputSystem.Composites.Vector3Composite.Mode.Analog).|
### One Modifier
![The Add Binding With One Modifier Composite binding is selected for the "fire" action on the Actions panel.](Images/AddBindingWithOneModifier.png){width="486" height="129"}
![The One Modifier part bindings appear under the "fire" action on the Actions panel.](Images/OneModifierComposite.png){width="486" height="147"}
A One Modifier Composite requires the user to hold down a "modifier" button in addition to another control from which the actual value of the binding is determined. This can be used, for example, for bindings such as "SHIFT+1". This type of Composite binding uses the [`OneModifierComposite`](xref:UnityEngine.InputSystem.Composites.OneModifierComposite) class. The buttons can be on any device, and can be toggle buttons or full-range buttons such as gamepad triggers.
The result is a value of the same type as the controls bound to the [`binding`](xref:UnityEngine.InputSystem.Composites.OneModifierComposite.binding) part.
```CSharp
// Add binding for "CTRL+1".
myAction.AddCompositeBinding("OneModifier")
.With("Binding", "<Keyboard>/1")
.With("Modifier", "<Keyboard>/ctrl")
// Add binding to mouse delta such that it only takes effect
// while the ALT key is down.
myAction.AddCompositeBinding("OneModifier")
.With("Binding", "<Mouse>/delta")
.With("Modifier", "<Keyboard>/alt");
```
The One Modifier Composite has two Part Bindings.
|Part|Type|Description|
|----|----|-----------|
|[`modifier`](xref:UnityEngine.InputSystem.Composites.OneModifierComposite.modifier)|`Button`|Modifier that has to be held for `binding` to come through. If the user holds any of the buttons bound to the `modifier` at the same time as the control that drives `binding`, the Composite assumes the value of the `binding` part. If the user does not press any button bound to the `modifier`, the Composite remains at its default value.|
|[`binding`](xref:UnityEngine.InputSystem.Composites.OneModifierComposite.binding)|Any|The control(s) whose value the Composite assumes while the user holds down the `modifier` button.|
This Composite has no parameters.
### Two Modifiers
![The bindings With Two Modifiers Composite binding is selected for the "fire" action on the Actions panel.](Images/AddBindingWithTwoModifiers.png){width="486" height="119"}
![The Two Modifiers part bindings appear under the "fire" action on the Actions panel.](Images/TwoModifiersComposite.png){width="486" height="149"}
A Two Modifiers Composite requires the user to hold down two "modifier" buttons in addition to another control from which the actual value of the binding is determined. This can be used, for example, for bindings such as "SHIFT+CTRL+1". This type of Composite binding uses the [`TwoModifiersComposite`](xref:UnityEngine.InputSystem.Composites.TwoModifiersComposite) class. The buttons can be on any device, and can be toggle buttons or full-range buttons such as gamepad triggers.
The result is a value of the same type as the controls bound to the [`binding`](xref:UnityEngine.InputSystem.Composites.TwoModifiersComposite.binding) part.
```CSharp
myAction.AddCompositeBinding("TwoModifiers")
.With("Button", "<Keyboard>/1")
.With("Modifier1", "<Keyboard>/leftCtrl")
.With("Modifier1", "<Keyboard>/rightCtrl")
.With("Modifier2", "<Keyboard>/leftShift")
.With("Modifier2", "<Keyboard>/rightShift");
```
The Two Modifiers Composite has three Part Bindings.
|Part|Type|Description|
|----|----|-----------|
|[`modifier1`](xref:UnityEngine.InputSystem.Composites.TwoModifiersComposite.modifier1)|`Button`|The first modifier the user must hold alongside `modifier2` for `binding` to come through. If the user does not press any button bound to `modifier1`, the Composite remains at its default value.|
|[`modifier2`](xref:UnityEngine.InputSystem.Composites.TwoModifiersComposite.modifier2)|`Button`|The second modifier the user must hold alongside `modifier1` for `binding` to come through. If the user does not press any button bound to `modifier2`, the Composite remains at its default value.|
|[`binding`](xref:UnityEngine.InputSystem.Composites.TwoModifiersComposite.binding)|Any|The control(s) whose value the Composite assumes while the user presses both `modifier1` and `modifier2` at the same time.|
This Composite has no parameters.
### Writing custom Composites
You can define new types of Composites, and register them with the API. Unity treats these the same as predefined types, which the Input System internally defines and registers in the same way.
To define a new type of Composite, create a class based on [`InputBindingComposite<TValue>`](xref:UnityEngine.InputSystem.InputBindingComposite`1).
> [!IMPORTANT]
> Composites must be __stateless__. This means that you cannot store local state that changes depending on the input being processed. For __stateful__ processing on bindings, see [interactions](xref:input-system-interactions#writing-custom-interactions).
```CSharp
// Using directives added for completeness; the namespaces below are the ones
// the InputControl and DisplayStringFormat attributes are assumed to live in.
using UnityEngine;
using UnityEngine.InputSystem;
using UnityEngine.InputSystem.Layouts;
using UnityEngine.InputSystem.Utilities;
#if UNITY_EDITOR
using UnityEditor;
#endif
// Use InputBindingComposite<TValue> as a base class for a composite that returns
// values of type TValue.
// NOTE: It is possible to define a composite that returns different kinds of values
// but doing so requires deriving directly from InputBindingComposite.
#if UNITY_EDITOR
[InitializeOnLoad] // Automatically register in editor.
#endif
// Determine how GetBindingDisplayString() formats the composite by applying
// the DisplayStringFormat attribute.
[DisplayStringFormat("{firstPart}+{secondPart}")]
public class CustomComposite : InputBindingComposite<float>
{
// Each part binding is represented as a field of type int and annotated with
// InputControlAttribute. Setting "layout" restricts the controls that
// are made available for picking in the UI.
//
// On creation, the int value is set to an integer identifier for the binding
// part. This identifier can read values from InputBindingCompositeContext.
// See ReadValue() below.
[InputControl(layout = "Button")]
public int firstPart;
[InputControl(layout = "Button")]
public int secondPart;
// Any public field that is not annotated with InputControlAttribute is considered
// a parameter of the composite. This can be set graphically in the UI and also
// in the data (e.g. "custom(floatParameter=2.0)").
public float floatParameter;
public bool boolParameter;
// This method computes the resulting input value of the composite based
// on the input from its part bindings.
public override float ReadValue(ref InputBindingCompositeContext context)
{
var firstPartValue = context.ReadValue<float>(firstPart);
var secondPartValue = context.ReadValue<float>(secondPart);
//... do some processing and return value
}
// This method computes the current actuation of the binding as a whole.
public override float EvaluateMagnitude(ref InputBindingCompositeContext context)
{
// Compute normalized [0..1] magnitude value for current actuation level.
}
static CustomComposite()
{
// Can give custom name or use default (type name with "Composite" clipped off).
// Same composite can be registered multiple times with different names to introduce
// aliases.
//
// NOTE: Registering from the static constructor using InitializeOnLoad and
// RuntimeInitializeOnLoadMethod is only one way. You can register the
// composite from wherever it works best for you. Note, however, that
// the registration has to take place before the composite is first used
// in a binding. Also, for the composite to show in the editor, it has
// to be registered from code that runs in edit mode.
InputSystem.RegisterBindingComposite<CustomComposite>();
}
[RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.BeforeSceneLoad)]
static void Init() {} // Trigger static constructor.
}
```
The Composite should now appear in the editor UI when you add a binding, and you can now use it in scripts.
```CSharp
myAction.AddCompositeBinding("custom(floatParameter=2.0)")
.With("firstpart", "<Gamepad>/buttonSouth")
.With("secondpart", "<Gamepad>/buttonNorth");
```
To define a custom parameter editor for the Composite, you can derive from [`InputParameterEditor<TObject>`](xref:UnityEngine.InputSystem.Editor.InputParameterEditor`1).
```CSharp
#if UNITY_EDITOR
using UnityEditor;
using UnityEngine.InputSystem.Editor;
public class CustomParameterEditor : InputParameterEditor<CustomComposite>
{
public override void OnGUI()
{
EditorGUILayout.LabelField("Custom stuff");
target.floatParameter = EditorGUILayout.FloatField("Some Parameter", target.floatParameter);
}
}
#endif
```
## Working with bindings
### Look up bindings
You can retrieve the bindings of an action using its [`InputAction.bindings`](xref:UnityEngine.InputSystem.InputAction.bindings) property which returns a read-only array of [`InputBinding`](xref:UnityEngine.InputSystem.InputBinding) structs.
```CSharp
// Get bindings of "fire" action.
var fireBindings = playerInput.actions["fire"].bindings;
```
Also, all the bindings for all actions in an [`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap) are made available through the [`InputActionMap.bindings`](xref:UnityEngine.InputSystem.InputActionMap.bindings) property. The bindings are associated with actions through an [action ID](xref:UnityEngine.InputSystem.InputAction.id) or [action name](xref:UnityEngine.InputSystem.InputAction.name) stored in the [`InputBinding.action`](xref:UnityEngine.InputSystem.InputBinding.action) property.
```CSharp
// Get all bindings in "gameplay" action map.
var gameplayBindings = playerInput.actions.FindActionMap("gameplay").bindings;
```
You can also look up the indices of specific bindings in [`InputAction.bindings`](xref:UnityEngine.InputSystem.InputAction.bindings) using the [`InputActionRebindingExtensions.GetBindingIndex`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.GetBindingIndex(UnityEngine.InputSystem.InputAction,UnityEngine.InputSystem.InputBinding)) method.
```CSharp
// Find the binding in the "Keyboard" control scheme.
playerInput.actions["fire"].GetBindingIndex(group: "Keyboard");
// Find the first binding to the space key in the "gameplay" action map.
playerInput.FindActionMap("gameplay").GetBindingIndex(
new InputBinding { path = "<Keyboard>/space" });
```
Finally, you can look up the binding that corresponds to a specific control through [`GetBindingIndexForControl`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.GetBindingIndexForControl*). This way, you can, for example, map a control found in the [`controls`](xref:UnityEngine.InputSystem.InputAction.controls) array of an [`InputAction`](xref:UnityEngine.InputSystem.InputAction) back to an [`InputBinding`](xref:UnityEngine.InputSystem.InputBinding).
```CSharp
// Find the binding that binds LMB to "fire". If there is no such binding,
// bindingIndex will be -1.
var fireAction = playerInput.actions["fire"];
var bindingIndex = fireAction.GetBindingIndexForControl(Mouse.current.leftButton);
if (bindingIndex == -1)
Debug.Log("Fire is not bound to LMB of the current mouse.");
```
### Change bindings
In general, you can change existing bindings via the [`InputActionSetupExtensions.ChangeBinding`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.ChangeBinding(UnityEngine.InputSystem.InputAction,System.Int32)) method. This returns an accessor that can be used to modify the properties of the targeted [`InputBinding`](xref:UnityEngine.InputSystem.InputBinding). Note that most of the write operations of the accessor are destructive. For non-destructive changes to bindings, refer to [Apply overrides](#apply-overrides).
```CSharp
// Get write access to the second binding of the 'fire' action.
var accessor = playerInput.actions["fire"].ChangeBinding(1);
// You can also gain access through the InputActionMap. Each
// map contains an array of all its bindings (see InputActionMap.bindings).
// Here we gain access to the third binding in the map.
accessor = playerInput.actions.FindActionMap("gameplay").ChangeBinding(2);
```
You can use the resulting accessor to modify properties through methods such as [`WithPath`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.BindingSyntax.WithPath*) or [`WithProcessors`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.BindingSyntax.WithProcessors*).
```CSharp
playerInput.actions["fire"].ChangeBinding(1)
// Change path to space key.
.WithPath("<Keyboard>/space");
```
You can also use the accessor to iterate through bindings using [`PreviousBinding`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.BindingSyntax.PreviousBinding*) and [`NextBinding`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.BindingSyntax.NextBinding*).
```CSharp
// Move accessor to previous binding.
accessor = accessor.PreviousBinding();
// Move accessor to next binding.
accessor = accessor.NextBinding();
```
If the given binding is a [composite](xref:UnityEngine.InputSystem.InputBinding.isComposite), you can address it by its name rather than by index.
```CSharp
// Change the 2DVector composite of the "move" action.
playerInput.actions["move"].ChangeCompositeBinding("2DVector")
//
playerInput.actions["move"].ChangeBinding("WASD")
```
#### Apply overrides
You can override aspects of any binding at run time non-destructively. Specific properties of [`InputBinding`](xref:UnityEngine.InputSystem.InputBinding) have an `override` variant that, if set, takes precedence over the property that it shadows. All `override` properties are of type `string`.
|Property|Override|Description|
|--------|--------|-----------|
|[`path`](xref:UnityEngine.InputSystem.InputBinding.path)|[`overridePath`](xref:UnityEngine.InputSystem.InputBinding.overridePath)|Replaces the [control path](xref:input-system-controls#control-paths) that determines which control(s) are referenced in the binding. If [`overridePath`](xref:UnityEngine.InputSystem.InputBinding.overridePath) is set to an empty string, the binding is effectively disabled.<br><br>Example: `"<Gamepad>/leftStick"`|
|[`processors`](xref:UnityEngine.InputSystem.InputBinding.processors)|[`overrideProcessors`](xref:UnityEngine.InputSystem.InputBinding.overrideProcessors)|Replaces the [processors](./UsingProcessors.md) applied to the binding.<br><br>Example: `"invert,normalize(min=0,max=10)"`|
|[`interactions`](xref:UnityEngine.InputSystem.InputBinding.interactions)|[`overrideInteractions`](xref:UnityEngine.InputSystem.InputBinding.overrideInteractions)|Replaces the [interactions](xref:input-system-interactions) applied to the binding.<br><br>Example: `"tap(duration=0.5)"`|
> [!NOTE]
> The `override` property values are not saved with the actions (for example, when calling [`InputActionAsset.ToJson()`](xref:UnityEngine.InputSystem.InputActionAsset.ToJson)). Refer to [Saving and loading rebinds](#save-and-load-rebinds) for details about how to persist user rebinds.
To set the various `override` properties, you can use the [`ApplyBindingOverride`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.ApplyBindingOverride(UnityEngine.InputSystem.InputAction,UnityEngine.InputSystem.InputBinding)) APIs.
```CSharp
// Rebind the "fire" action to the left trigger on the gamepad.
playerInput.actions["fire"].ApplyBindingOverride("<Gamepad>/leftTrigger");
```
In most cases, it is best to locate specific bindings using APIs such as [`GetBindingIndexForControl`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.GetBindingIndexForControl*) and to then apply the override to that specific binding.
```CSharp
// Find the "Jump" binding for the space key.
var jumpAction = playerInput.actions["Jump"];
var bindingIndex = jumpAction.GetBindingIndexForControl(Keyboard.current.spaceKey);
// And change it to the enter key.
jumpAction.ApplyBindingOverride(bindingIndex, "<Keyboard>/enter");
```
#### Erase bindings
You can erase a binding by calling [`Erase`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.BindingSyntax.Erase*) on the [binding accessor](xref:UnityEngine.InputSystem.InputActionSetupExtensions.BindingSyntax).
```CSharp
// Erase first binding on "fire" action.
playerInput.actions["fire"].ChangeBinding(0).Erase();
// Erase "2DVector" composite. This will also erase the part
// bindings of the composite.
playerInput.actions["move"].ChangeCompositeBinding("2DVector").Erase();
// Can also do this by using the name given to the composite binding.
playerInput.actions["move"].ChangeCompositeBinding("WASD").Erase();
// Erase first binding in "gameplay" action map.
playerInput.actions.FindActionMap("gameplay").ChangeBinding(0).Erase();
```
#### Add bindings
New bindings can be added to an action using [`AddBinding`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.AddBinding(UnityEngine.InputSystem.InputAction,System.String,System.String,System.String,System.String)) or [`AddCompositeBinding`](xref:UnityEngine.InputSystem.InputActionSetupExtensions.AddCompositeBinding(UnityEngine.InputSystem.InputAction,System.String,System.String,System.String)).
```CSharp
// Add a binding for the left mouse button to the "fire" action.
playerInput.actions["fire"].AddBinding("<Mouse>/leftButton");
// Add a WASD composite binding to the "move" action.
playerInput.actions["move"]
.AddCompositeBinding("2DVector")
.With("Up", "<Keyboard>/w")
.With("Left", "<Keyboard>/a")
.With("Down", "<Keyboard>/s")
.With("Right", "<Keyboard>/d");
```
#### Set parameters
A binding may, either through itself or through its associated action, lead to [processor](UsingProcessors.md), [interaction](xref:input-system-interactions), and/or [composite](#composite-bindings) objects being created. These objects can have parameters that you can configure in the [Binding properties view](xref:input-system-configuring-input#bindings) of the [Input Actions Editor](xref:input-system-configuring-input) or through the API. This configuration gives the parameters their default values.
```CSharp
// Create an action with a "Hold" interaction on it.
// Set the "duration" parameter to 4 seconds.
var action = new InputAction(interactions: "hold(duration=4)");
```
You can query the current value of any such parameter using the [`GetParameterValue`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.GetParameterValue(UnityEngine.InputSystem.InputAction,System.String,UnityEngine.InputSystem.InputBinding)) API.
```CSharp
// This returns a PrimitiveValue?. It will be null if the
// parameter is not found. Otherwise, it is a PrimitiveValue
// which can be converted to a number or boolean.
var p = action.GetParameterValue("duration");
Debug.Log("'duration' is set to: " + p.Value);
```
The above looks for the parameter on any object found on any of the bindings of the action. You can restrict the search to a narrower set of objects, bindings, or both.
```CSharp
// Retrieve the value of the "duration" parameter specifically of a
// "Hold" interaction and only look on bindings in the "Gamepad" group.
action.GetParameterValue("hold:duration", InputBinding.MaskByGroup("Gamepad"));
```
Alternatively, you can use an expression parameter to encapsulate both the type and the name of the parameter you want to get the value of. This has the advantage of not needing a string parameter but rather references both the type and the name of the parameter in a typesafe way.
```CSharp
// Retrieve the value of the "duration" parameter of TapInteraction.
// This version returns a float? instead of a PrimitiveValue? as it
// sees the type of "duration" at compile-time.
action.GetParameterValue((TapInteraction x) => x.duration);
```
To alter the current value of a parameter, you can use what is referred to as a "parameter override". You can apply these at the level of an individual [`InputAction`](xref:UnityEngine.InputSystem.InputAction), or at the level of an entire [`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap), or even at the level of an entire [`InputActionAsset`](xref:UnityEngine.InputSystem.InputActionAsset). Such overrides are stored internally and applied automatically even on bindings added later.
To add an override, use the [`ApplyParameterOverride`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.ApplyParameterOverride(UnityEngine.InputSystem.InputAction,System.String,UnityEngine.InputSystem.Utilities.PrimitiveValue,UnityEngine.InputSystem.InputBinding)) API or any of its overloads.
```CSharp
// Set the "duration" parameter on all bindings of the action to 4.
action.ApplyParameterOverride("duration", 4f);
// Set the "duration" parameter specifically for "tap" interactions only.
action.ApplyParameterOverride("tap:duration", 0.5f);
// Set the "duration" parameter on tap interactions but only for bindings
// in the "Gamepad" group.
action.ApplyParameterOverride("tap:duration", 0.5f, InputBinding.MaskByGroup("Gamepad");
// Set tap duration for all bindings in an action map.
map.ApplyParameterOverride("tap:duration", 0.5f);
// Set tap duration for all bindings in an entire asset.
asset.ApplyParameterOverride("tap:duration", 0.5f);
// Like for GetParameterValue, overloads are available that take
// an expression instead.
action.ApplyParameterOverride((TapInteraction x) => x.duration, 0.4f);
map.ApplyParameterOverride((TapInteraction x) => x.duration, 0.4f);
asset.ApplyParameterOverride((TapInteraction x) => x.duration, 0.4f);
```
The new value will be applied immediately and affect all composites, processors, and interactions already in use and targeted by the override.
Note that if multiple parameter overrides are applied, especially when some are applied directly to actions and some to maps or assets, the overrides may conflict. In this case, the Input System attempts to choose the "most specific" override.
```CSharp
// Let's say you have an InputAction `action` that is part of an InputActionAsset asset.
var map = action.actionMap;
var asset = map.asset;
// And you apply a "tap:duration" override to the action.
action.ApplyParameterOverride("tap:duration", 0.6f);
// But also apply a "tap:duration" override to the action specifically
// for bindings in the "Gamepad" group.
action.ApplyParameterOverride("tap:duration", 1f, InputBinding.MaskByGroup("Gamepad"));
// And finally also apply a "tap:duration" override to the entire asset.
asset.ApplyParameterOverride("tap:duration", 0.3f);
// Now, bindings on `action` in the "Gamepad" group will use a value of 1 for tap durations,
// other bindings on `action` will use 0.6, and every other binding in the asset will use 0.3.
```
You can use parameter overrides, for example, to scale mouse delta values on a "Look" action.
```CSharp
// Set up an example "Look" action.
var look = new InputAction("look", type: InputActionType.Value);
look.AddBinding("<Mouse>/delta", groups: "KeyboardMouse", processors: "scaleVector2");
look.AddBinding("<Gamepad>/rightStick", groups: "Gamepad", processors: "scaleVector2");
// Now you can adjust stick sensitivity separately from mouse sensitivity.
look.ApplyParameterOverride("scaleVector2:x", 0.5f, InputBinding.MaskByGroup("KeyboardMouse"));
look.ApplyParameterOverride("scaleVector2:y", 0.5f, InputBinding.MaskByGroup("KeyboardMouse"));
look.ApplyParameterOverride("scaleVector2:x", 2f, InputBinding.MaskByGroup("Gamepad"));
look.ApplyParameterOverride("scaleVector2:y", 2f, InputBinding.MaskByGroup("Gamepad"));
// As an alternative to using groups, you can also apply overrides directly to specific binding paths.
look.ApplyParameterOverride("scaleVector2:x", 0.5f, new InputBinding("<Mouse>/delta"));
look.ApplyParameterOverride("scaleVector2:y", 0.5f, new InputBinding("<Mouse>/delta"));
```
> [!NOTE]
> Parameter overrides are *not* persisted along with an asset.
### Interactive rebinding
> [!NOTE]
> To download a sample project that demonstrates how to set up a rebinding user interface with Input System APIs, open the Package Manager, select the Input System package, and download the "Rebinding UI" sample.
Runtime rebinding allows users of your application to set their own bindings.
To allow users to choose their own bindings interactively, use the [`InputActionRebindingExtensions.RebindingOperation`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation) class. Call the [`PerformInteractiveRebinding()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.PerformInteractiveRebinding(UnityEngine.InputSystem.InputAction,System.Int32)) method on an action to create a rebinding operation. This operation waits for the Input System to register any input from any device which matches the action's expected control type, then uses [`InputBinding.overridePath`](xref:UnityEngine.InputSystem.InputBinding.overridePath) to assign the control path for that control to the action's bindings. If the user actuates multiple controls, the rebinding operation chooses the control with the highest [magnitude](xref:input-system-controls#control-actuation).
> [!IMPORTANT]
> You must dispose of [`InputActionRebindingExtensions.RebindingOperation`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation) instances via `Dispose()`, so that they don't leak memory on the unmanaged memory heap.
```C#
void RemapButtonClicked(InputAction actionToRebind)
{
var rebindOperation = actionToRebind
.PerformInteractiveRebinding()
// Dispose the operation when it completes or is canceled so it
// does not leak unmanaged memory (see the note above).
.OnComplete(operation => operation.Dispose())
.OnCancel(operation => operation.Dispose())
.Start();
}
```
The [`InputActionRebindingExtensions.RebindingOperation`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation) API is highly configurable to match your needs. For example, you can:
* Choose expected control types ([`WithExpectedControlType()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithExpectedControlType(System.Type))).
* Exclude certain controls ([`WithControlsExcluding()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithControlsExcluding(System.String))).
* Set a control to cancel the operation ([`WithCancelingThrough()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithCancelingThrough(UnityEngine.InputSystem.InputControl))).
* Choose which bindings to apply the operation on if the action has multiple bindings ([`WithTargetBinding()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithTargetBinding(System.Int32)), [`WithBindingGroup()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithBindingGroup(System.String)), [`WithBindingMask()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithBindingMask(System.Nullable{UnityEngine.InputSystem.InputBinding}))).
Refer to the scripting API reference for [`InputActionRebindingExtensions.RebindingOperation`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation) for a full overview.
Note that [`PerformInteractiveRebinding()`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.PerformInteractiveRebinding(UnityEngine.InputSystem.InputAction,System.Int32)) automatically applies a set of default configurations based on the given action and targeted binding.
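As a sketch (not from the manual) of combining several of the configuration methods listed above on the same hypothetical `actionToRebind` from the earlier snippet:
```CSharp
var rebindOperation = actionToRebind
    .PerformInteractiveRebinding()
    .WithControlsExcluding("<Mouse>/position")   // ignore pointer position noise
    .WithCancelingThrough("<Keyboard>/escape")   // let the user back out
    .OnMatchWaitForAnother(0.1f)                 // wait briefly for a better match
    .OnComplete(operation => operation.Dispose())
    .OnCancel(operation => operation.Dispose())
    .Start();
```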
### Save and load rebinds
You can save the `override` properties of [bindings](xref:UnityEngine.InputSystem.InputBinding) as JSON strings and later restore them from these strings. Use [`SaveBindingOverridesAsJson`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.SaveBindingOverridesAsJson(UnityEngine.InputSystem.IInputActionCollection2)) to create these strings and [`LoadBindingOverridesFromJson`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.LoadBindingOverridesFromJson(UnityEngine.InputSystem.IInputActionCollection2,System.String,System.Boolean)) to restore overrides from them.
```CSharp
// Store player rebinds in PlayerPrefs.
var rebinds = playerInput.actions.SaveBindingOverridesAsJson();
PlayerPrefs.SetString("rebinds", rebinds);

// Restore player rebinds from PlayerPrefs. Loading removes all existing
// overrides on the actions; pass `false` as the second argument if you
// want to prevent that.
var savedRebinds = PlayerPrefs.GetString("rebinds");
playerInput.actions.LoadBindingOverridesFromJson(savedRebinds);
```
#### Restore original bindings
You can remove binding overrides and thus restore defaults by using [`RemoveBindingOverride`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RemoveBindingOverride(UnityEngine.InputSystem.InputAction,System.Int32)) or [`RemoveAllBindingOverrides`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RemoveAllBindingOverrides(UnityEngine.InputSystem.IInputActionCollection2)).
```CSharp
// Remove binding overrides from the first binding of the "fire" action.
playerInput.actions["fire"].RemoveBindingOverride(0);
// Remove all binding overrides from the "fire" action.
playerInput.actions["fire"].RemoveAllBindingOverrides();
// Remove all binding overrides from a player's actions.
playerInput.actions.RemoveAllBindingOverrides();
```
#### Display bindings
It is often useful to show the user what an action is currently bound to (taking any active binding overrides into account), both in rebinding UIs and in on-screen hints while the app is running. You can use [`InputBinding.effectivePath`](xref:UnityEngine.InputSystem.InputBinding.effectivePath) to get the currently active path for a binding (this returns [`overridePath`](xref:UnityEngine.InputSystem.InputBinding.overridePath) if set, and otherwise returns [`path`](xref:UnityEngine.InputSystem.InputBinding.path)).
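For example, a minimal sketch of reading the effective path of an action's first binding (the `action` variable is assumed to be an `InputAction` you already have):
```CSharp
// effectivePath returns overridePath when an override is set,
// otherwise the original path.
var binding = action.bindings[0];
Debug.Log($"Currently bound to: {binding.effectivePath}");
```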
The easiest way to retrieve a display string for an action is to call [`InputActionRebindingExtensions.GetBindingDisplayString`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.GetBindingDisplayString*) which is an extension method for [`InputAction`](xref:UnityEngine.InputSystem.InputAction).
```CSharp
// Get a binding string for the action as a whole. This takes into account which
// bindings are currently active and the actual controls bound to the action.
m_RebindButton.GetComponentInChildren<Text>().text = action.GetBindingDisplayString();
// Get a binding string for a specific binding on an action by index.
m_RebindButton.GetComponentInChildren<Text>().text = action.GetBindingDisplayString(1);
// Look up binding indices with GetBindingIndex.
var bindingIndex = action.GetBindingIndex(InputBinding.MaskByGroup("Gamepad"));
m_RebindButton.GetComponentInChildren<Text>().text =
action.GetBindingDisplayString(bindingIndex);
```
You can also use this method to replace the text string with images.
```CSharp
// Call GetBindingDisplayString() such that it also returns information about the
// name of the device layout and path of the control on the device. This information
// is useful for reliably associating imagery with individual controls.
// NOTE: The first argument is the index of the binding within InputAction.bindings.
var bindingString = action.GetBindingDisplayString(0, out var deviceLayout, out var controlPath);
// If it's a gamepad, look up an icon for the control.
Sprite icon = null;
if (!string.IsNullOrEmpty(deviceLayout)
&& !string.IsNullOrEmpty(controlPath)
&& InputSystem.IsFirstLayoutBasedOnSecond(deviceLayout, "Gamepad"))
{
switch (controlPath)
{
case "buttonSouth": icon = aButtonIcon; break;
case "dpad/up": icon = dpadUpIcon; break;
//...
}
}
// If you have an icon, display it instead of the text.
var text = m_RebindButton.GetComponentInChildren<Text>();
var image = m_RebindButton.GetComponentInChildren<Image>();
if (icon != null)
{
// Display icon.
text.gameObject.SetActive(false);
image.gameObject.SetActive(true);
image.sprite = icon;
}
else
{
// Display text.
text.gameObject.SetActive(true);
image.gameObject.SetActive(false);
text.text = bindingString;
}
```
Additionally, each binding has a [`ToDisplayString`](xref:UnityEngine.InputSystem.InputBinding.ToDisplayString(UnityEngine.InputSystem.InputBinding.DisplayStringOptions,UnityEngine.InputSystem.InputControl)) method, which you can use to turn individual bindings into display strings. There is also a generic formatting method for control paths, [`InputControlPath.ToHumanReadableString`](xref:UnityEngine.InputSystem.InputControlPath.ToHumanReadableString(System.String,UnityEngine.InputSystem.InputControlPath.HumanReadableStringOptions,UnityEngine.InputSystem.InputControl)), which you can use with arbitrary control path strings.
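As a small illustration of the generic formatter (the path and options are arbitrary choices):
```CSharp
// Turn a raw control path into a readable label, omitting the device part.
var label = InputControlPath.ToHumanReadableString(
    "<Gamepad>/buttonSouth",
    InputControlPath.HumanReadableStringOptions.OmitDevice);
Debug.Log(label); // For example "Button South".
```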
Note that the controls a binding resolves to can change at any time, and the display strings for controls might change dynamically. For example, if the user switches the currently active keyboard layout, the display string for each individual key on the [`Keyboard`](xref:UnityEngine.InputSystem.Keyboard) might change.
## Control schemes
A binding can belong to any number of binding groups. Unity stores these on the [`InputBinding`](xref:UnityEngine.InputSystem.InputBinding) class as a semicolon-separated string in the [`InputBinding.groups`](xref:UnityEngine.InputSystem.InputBinding.groups) property, and you can use them for any arbitrary grouping of bindings. To enable different sets of binding groups for an [`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap) or [`InputActionAsset`](xref:UnityEngine.InputSystem.InputActionAsset), you can use the [`InputActionMap.bindingMask`](xref:UnityEngine.InputSystem.InputActionMap.bindingMask)/[`InputActionAsset.bindingMask`](xref:UnityEngine.InputSystem.InputActionAsset.bindingMask) property. The Input System uses this to implement the concept of grouping bindings into different [`InputControlSchemes`](xref:UnityEngine.InputSystem.InputControlScheme).
Control Schemes use binding groups to map bindings in an [`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap) or [`InputActionAsset`](xref:UnityEngine.InputSystem.InputActionAsset) to different types of devices. The [`PlayerInput`](xref:input-system-player-input) class uses these to enable a matching control scheme for a new [user](xref:input-system-user-management) joining the game, based on the device they are playing on.
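For example, a minimal sketch of activating only the bindings in a "Gamepad" binding group on an asset (the `asset` variable and the group name are assumptions for illustration):
```CSharp
// Only resolve and use bindings that belong to the "Gamepad" group.
asset.bindingMask = InputBinding.MaskByGroup("Gamepad");

// Clear the mask again so that all bindings are considered.
asset.bindingMask = null;
```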
## Details
### Binding resolution
When the Input System accesses the [controls](xref:input-system-controls) bound to an action for the first time, the action resolves its bindings to match them to existing controls on existing devices. In this process, the action calls [`InputSystem.FindControls<>()`](xref:UnityEngine.InputSystem.InputSystem.FindControls``1(System.String,UnityEngine.InputSystem.InputControlList{``0}@)) (filtering for devices assigned to the InputActionMap, if there are any) for the binding path of each of the action's bindings. This creates a list of resolved controls that are now bound to the action.
Note that a single [binding path](xref:input-system-controls#control-paths) can match multiple controls:
* A specific device path such as `<DualShockGamepad>/buttonEast` matches the "Circle" button on a [PlayStation controller](xref:input-system-gamepad#playstation-controllers). If you have multiple PlayStation controllers connected, it resolves to the "Circle" button on each of these controllers.
* An abstract device path such as `<Gamepad>/buttonEast` matches the right action button on any connected gamepad. If you have a PlayStation controller and an [Xbox controller](xref:input-system-gamepad#xbox-controllers) connected, it resolves to the "Circle" button on the PlayStation controller, and to the "B" button on the Xbox controller.
* A binding path can also contain wildcards, such as `<Gamepad>/button*`. This matches any control on any gamepad with a name starting with "button", which matches all the four action buttons on any connected gamepad. A different example: `*/{Submit}` matches any control tagged with the "Submit" [usage](xref:input-system-controls#control-usages) on any device.
If there are multiple bindings on the same action that all reference the same control(s), the control will effectively feed into the action multiple times. This is to allow, for example, a single control to produce different input on the same action by virtue of being bound in a different fashion (composites, processors, interactions, etc). However, regardless of how many times a control is bound on any given action, it will only be mentioned once in the action's [array of `controls`](xref:UnityEngine.InputSystem.InputAction.controls).
To query the controls that an action resolves to, you can use [`InputAction.controls`](xref:UnityEngine.InputSystem.InputAction.controls). You can also run this query if the action is disabled.
To be notified when binding resolution happens, you can listen to [`InputSystem.onActionChange`](xref:UnityEngine.InputSystem.InputSystem.onActionChange) which triggers [`InputActionChange.BoundControlsAboutToChange`](xref:UnityEngine.InputSystem.InputActionChange.BoundControlsAboutToChange) before modifying control lists and triggers [`InputActionChange.BoundControlsChanged`](xref:UnityEngine.InputSystem.InputActionChange.BoundControlsChanged) after having updated them.
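The following is a minimal sketch of both queries; the binding path is an arbitrary choice:
```CSharp
var action = new InputAction(binding: "<Gamepad>/buttonSouth");

// Accessing .controls triggers binding resolution if it hasn't happened yet
// and works even while the action is disabled.
foreach (var control in action.controls)
    Debug.Log(control.path);

// Get notified when bound controls have been updated.
InputSystem.onActionChange += (actionOrMap, change) =>
{
    if (change == InputActionChange.BoundControlsChanged)
        Debug.Log($"Controls re-resolved for {actionOrMap}");
};
```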
#### Binding resolution while actions are enabled
In certain situations, the [controls](xref:UnityEngine.InputSystem.InputAction.controls) bound to an action have to be updated more than once. For example, if a new [device](xref:input-system-devices) becomes usable with an action, the action may now pick up input from additional controls. Also, if bindings are added, removed, or modified, control lists will need to be updated.
This updating of controls usually happens transparently in the background. However, when an action is [enabled](xref:UnityEngine.InputSystem.InputAction.enabled) and especially when it is [in progress](xref:UnityEngine.InputSystem.InputAction.IsInProgress*), there may be a noticeable effect on the action.
Adding or removing a device &ndash; either [globally](xref:UnityEngine.InputSystem.InputSystem.devices) or to/from the [device list](xref:UnityEngine.InputSystem.InputActionAsset.devices) of an action &ndash; remains transparent, __except__ when an action is in progress and the device being removed is the one providing its [active control](xref:UnityEngine.InputSystem.InputAction.activeControl). In this case, the action is automatically [cancelled](xref:UnityEngine.InputSystem.InputAction.canceled).
Modifying the [binding mask](xref:UnityEngine.InputSystem.InputActionAsset.bindingMask) or modifying any of the bindings (such as through [rebinding](#interactive-rebinding) or by adding or removing bindings) will, however, lead to all enabled actions being temporarily disabled and then re-enabled and resumed.
#### Choose which devices to use
> [!NOTE]
> [`InputUser`](xref:input-system-user-management) and [`PlayerInput`](xref:input-system-player-input) make use of this facility automatically. They set [`InputActionMap.devices`](xref:UnityEngine.InputSystem.InputActionMap.devices) automatically based on the devices that are paired to the user.
By default, actions resolve their bindings against all devices present in the Input System (that is, [`InputSystem.devices`](xref:UnityEngine.InputSystem.InputSystem.devices)). For example, if there are two gamepads present in the system, a binding to `<Gamepad>/buttonSouth` picks up both gamepads and allows the action to be used from either.
You can override this behavior by restricting [`InputActionAssets`](xref:UnityEngine.InputSystem.InputActionAsset) or individual [`InputActionMaps`](xref:UnityEngine.InputSystem.InputActionMap) to a specific set of devices. If you do this, binding resolution only takes the controls of the given devices into account.
```CSharp
var actionMap = new InputActionMap();
// Restrict the action map to just the first gamepad.
actionMap.devices = new[] { Gamepad.all[0] };
```
### Conflicting inputs
There are two situations where a given input may lead to ambiguity:
1. Several controls are bound to the same action and more than one is feeding input into the action at the same time. Example: an action that is bound to both the left and right trigger on a Gamepad and both triggers are pressed.
2. The input is part of a sequence of inputs and there are several possible such sequences. Example: one action is bound to the `B` key and another action is bound to `Shift-B`.
#### Multiple, concurrently used controls
> [!NOTE]
> This section does not apply to [`PassThrough`](xref:input-system-responding#pass-through) actions as they are by design meant to allow multiple concurrent inputs.
For a [`Button`](xref:input-system-responding#button) or [`Value`](xref:input-system-responding#value) action, there can only be one control at any time that is "driving" the action. This control is considered the [`activeControl`](xref:UnityEngine.InputSystem.InputAction.activeControl).
When an action is bound to multiple controls, the [`activeControl`](xref:UnityEngine.InputSystem.InputAction.activeControl) at any point is the one with the greatest level of ["actuation"](xref:input-system-controls#control-actuation), that is, the largest value returned from [`EvaluateMagnitude`](xref:UnityEngine.InputSystem.InputControl.EvaluateMagnitude*). If a control exceeds the actuation level of the current [`activeControl`](xref:UnityEngine.InputSystem.InputAction.activeControl), it will itself become the active control.
The following example demonstrates this mechanism with a [`Button`](xref:input-system-responding#button) action and also demonstrates the difference to a [`PassThrough`](xref:input-system-responding#pass-through) action.
```CSharp
// Create a button and a pass-through action and bind each of them
// to both triggers on the gamepad.
var buttonAction = new InputAction(type: InputActionType.Button,
binding: "<Gamepad>/*Trigger");
var passThroughAction = new InputAction(type: InputActionType.PassThrough,
binding: "<Gamepad>/*Trigger");
buttonAction.performed += c => Debug.Log($"{c.control.name} pressed (Button)");
passThroughAction.performed += c => Debug.Log($"{c.control.name} changed (Pass-Through)");
buttonAction.Enable();
passThroughAction.Enable();
// Press the left trigger all the way down. (Set() is the InputTestFixture
// helper used here to feed input from code.)
// This will trigger both buttonAction and passThroughAction. Both will
// see leftTrigger becoming the activeControl.
Set(gamepad.leftTrigger, 1f);
// Will log
// "leftTrigger pressed (Button)" and
// "leftTrigger changed (Pass-Through)"
// Press the right trigger halfway down.
// This will *not* trigger or otherwise change buttonAction as the right trigger
// is actuated *less* than the left one that is already driving the action.
// However, passThroughAction does not perform such tracking and will thus respond
// directly to the value change. It will perform and make rightTrigger its activeControl.
Set(gamepad.rightTrigger, 0.5f);
// Will log
// "rightTrigger changed (Pass-Through)"
// Release the left trigger.
// For buttonAction, this will mean that now all controls feeding into the action have
// been released and thus the button releases. activeControl will go back to null.
// For passThroughAction, this is just another value change. So, the action performs
// and its active control changes to leftTrigger.
Set(gamepad.leftTrigger, 0f);
// Will log
// "leftTrigger changed (Pass-Through)"
```
For [composite bindings](#composite-bindings), the magnitude of the composite as a whole is tracked rather than that of individual controls. However, [`activeControl`](xref:UnityEngine.InputSystem.InputAction.activeControl) will still track the individual controls within the composite.
##### Disable conflict resolution
Conflict resolution is always applied to [Button](xref:input-system-responding#button) and [Value](xref:input-system-responding#value) type actions. However, it can be undesirable in situations when an action is simply used to gather any and all inputs from bound controls. For example, the following action would monitor the A button of all available gamepads:
```CSharp
var action = new InputAction(type: InputActionType.PassThrough, binding: "<Gamepad>/buttonSouth");
action.Enable();
```
Because this action uses the [Pass-Through](xref:input-system-responding#pass-through) type, conflict resolution is bypassed, so a press of the button on one gamepad does not cause presses on other gamepads to be ignored.
#### Multiple input sequences (such as keyboard shortcuts)
> [!NOTE]
> The mechanism described here only applies to actions that are part of the same [`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap) or [`InputActionAsset`](xref:UnityEngine.InputSystem.InputActionAsset).
Inputs that are used in combination with other inputs may also lead to ambiguities. If, for example, the `B` key on the keyboard is bound both on its own as well as in combination with the `shift` key, then if you first press `shift` and then `B`, the latter key press would be a valid input for either of the actions.
The way this is handled is that bindings will be processed in the order of decreasing "complexity". This metric is derived automatically from the binding:
* A binding that is *not* part of a [composite](#composite-bindings) is assigned a complexity of 1.
* A binding that *is* part of a [composite](#composite-bindings) is assigned a complexity equal to the number of part bindings in the composite.
In our example, this means that a [`OneModifier`](#one-modifier) composite binding to `Shift+B` has a higher "complexity" than a binding to `B` and thus is processed first.
Additionally, the first binding that results in the action changing [phase](xref:input-system-responding#action-callbacks) will "consume" the input. This consuming will result in other bindings to the same input not being processed. So in our example, when `Shift+B` "consumes" the `B` input, the binding to `B` will be skipped.
The following example illustrates how this works at the API level.
```CSharp
// Create two actions in the same map.
var map = new InputActionMap();
var bAction = map.AddAction("B");
var shiftbAction = map.AddAction("ShiftB");
// Bind one of the actions to 'B' and the other to 'SHIFT+B'.
bAction.AddBinding("<Keyboard>/b");
shiftbAction.AddCompositeBinding("OneModifier")
.With("Modifier", "<Keyboard>/shift")
.With("Binding", "<Keyboard>/b");
// Print something to the console when the actions are triggered.
bAction.performed += _ => Debug.Log("B action performed");
shiftbAction.performed += _ => Debug.Log("SHIFT+B action performed");
// Start listening to input.
map.Enable();
// Now, let's assume the left shift key on the keyboard is pressed (here, we manually
// press it with the InputTestFixture API).
Press(Keyboard.current.leftShiftKey);
// And then the B is pressed. This is a valid input for both
// bAction as well as shiftbAction.
//
// What will happen now is that shiftbAction will do its processing first. In response,
// it will *perform* the action (i.e. we see the `performed` callback being invoked) and
// thus "consume" the input. bAction will stay silent as it will in turn be skipped over.
Press(Keyboard.current.bKey);
```
### Initial state check
After an action is [enabled](xref:UnityEngine.InputSystem.InputAction.enabled), it will start reacting to input as it comes in. However, at the time the action is enabled, one or more of the controls that are [bound](xref:UnityEngine.InputSystem.InputAction.controls) to an action may already have a non-default state at that point.
Using what is referred to as an "initial state check", an action can be made to respond to such a non-default state as if the state change happened *after* the action was enabled. The way this works is that in the first input [update](xref:UnityEngine.InputSystem.InputSystem.Update*) after the action was enabled, all its bound controls are checked in turn. If any of them has a non-default state, the action responds right away.
This check is implicitly enabled for [Value](xref:input-system-responding#value) actions. If, for example, you have a `Move` action bound to the left stick on the gamepad and the stick is already pushed in a direction when `Move` is enabled, the character will immediately start walking.
By default, [Button](xref:input-system-responding#button) and [Pass-Through](xref:input-system-responding#pass-through) type actions do not perform this check. A button that is already pressed when its action is enabled must first be released and then pressed again to trigger the action.
However, you can manually enable initial state checks on these types of actions using the checkbox in the Editor:
![The Initial State Check setting appears with a checkmark under the Pass Through action on the Actions panel.](./Images/InitialStateCheck.png){width="486" height="116"}
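In code, recent package versions also expose an equivalent per-action flag. Treat the property name in this sketch as an assumption and check it against the API of the version you use:
```CSharp
// A minimal sketch: opt a Button-type action into the initial state check.
// wantsInitialStateCheck is assumed to be available in your package version.
var fireAction = new InputAction("Fire", InputActionType.Button, "<Gamepad>/rightTrigger");
fireAction.wantsInitialStateCheck = true;
fireAction.Enable();
```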
View File
@@ -0,0 +1,154 @@
---
uid: input-system-actions
---
# Actions
**Actions** are an important concept in the Input System. They allow you to separate the purpose of an input from the device controls which perform that input. For example, the purpose of an input in a game might be to make the player's character move around. The device control associated with that action might be the motion of the left gamepad stick.
To associate an action with one or more device controls, you set up [input bindings](xref:input-system-action-bindings) in the [Input Actions Editor](xref:input-system-configuring-input). Then you can refer to those actions in your code, instead of the specific devices. The input bindings define which device's controls are used to perform the action. For example this screenshot shows the "Move" action's bindings to the left gamepad stick and the keyboard's arrow keys.
![](Images/ActionsBinding.png)<br/>
*The Actions panel of the Input Actions Project Settings window*
When you get a reference to an action in your code, you can use it to check its value, or attach a callback method to be notified when it is performed. For a simple example script demonstrating this, refer to [Workflow Overview - Actions](xref:input-system-workflow-project-wide-actions).
Actions also make it simpler to create a system that lets your players [customize their bindings at runtime](xref:input-system-action-bindings#interactive-rebinding), which is a common requirement for games.
> [!NOTE]
> - Actions are a runtime-only feature. You can't use them in [Editor window code](https://docs.unity3d.com/ScriptReference/EditorWindow.html).
>
> - You can read input without using actions and bindings by directly reading specific device controls. This is less flexible, but can be quicker to implement for certain situations. For more information, refer to [Workflow Overview - Directly Reading Device States](xref:input-system-workflow-direct).
>
> - Although you can reorder actions in this window, the ordering is for visual convenience only, and does not affect the order in which the actions are triggered in your code. If multiple actions are performed in the same frame, the order in which they are reported by the Input System is undefined. To avoid problems, you should not write code that assumes they will be reported in a particular order.
## Scripting access
Here are several important APIs you can use to script with actions in the Input System:
|API name|Description|
|-----|-----------|
|[`InputSystem.actions`](xref:UnityEngine.InputSystem.InputSystem)|A reference to the set of actions assigned as [project-wide actions](xref:project-wide-actions).|
|[`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap)|A named collection of input actions, treated as a group. This is the API equivalent to an entry in the "Action Maps" panel of the [Input Actions Editor](xref:input-system-configuring-input).|
|[`InputAction`](xref:UnityEngine.InputSystem.InputAction)|A named action that can return the current value of the controls that it is bound to, or can trigger callbacks in response to input. This is the API equivalent to an entry in the "Actions" panel of the [Input Actions Editor](xref:input-system-configuring-input).|
|[`InputBinding`](xref:UnityEngine.InputSystem.InputBinding)|The relationship between an action and the specific device controls for which it receives input. For more information about Bindings and how to use them, refer to [Input Bindings](xref:input-system-action-bindings).|
Each action has a name ([`InputAction.name`](xref:UnityEngine.InputSystem.InputAction.name)), which must be unique within the action map that the action belongs to, if any (see [`InputAction.actionMap`](xref:UnityEngine.InputSystem.InputAction.actionMap)). Each action also has a unique ID ([`InputAction.id`](xref:UnityEngine.InputSystem.InputAction.id)), which you can use to reference the action. The ID remains the same even if you rename the action.
Each action map has a name ([`InputActionMap.name`](xref:UnityEngine.InputSystem.InputActionMap.name)), which must also be unique with respect to the other action maps present, if any. Each action map also has a unique ID ([`InputActionMap.id`](xref:UnityEngine.InputSystem.InputActionMap.id)), which you can use to reference the action map. The ID remains the same even if you rename the action map.
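For example, a minimal sketch of looking an action up by name or by its ID (the map and action names here are illustrative):
```CSharp
var map = new InputActionMap("Gameplay");
var jump = map.AddAction("Jump", binding: "<Gamepad>/buttonSouth");

// Look up by name...
var byName = map.FindAction("Jump");
// ...or by the stable ID, which survives renames.
var byId = map.FindAction(jump.id.ToString());
```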
## Create actions
Use the [Input Actions Editor](xref:input-system-configuring-input) in the Project Settings window to create actions. This is the recommended workflow if you want to organize all your input actions and bindings in one place, to apply across the whole project. This workflow works for most types of game or app.
![Action Editor Window](Images/ProjectSettingsInputActionsSimpleShot.png)
*The Input Actions Editor in the Project Settings window*
The Input System package API is open and flexible, which allows it to suit less common scenarios. So if you want to customize your project beyond the standard workflow, you can use these alternative techniques to create actions:
- [Declare actions in MonoBehaviour components](#declare-actions-in-monobehaviours)
- [Load actions from JSON data](#load-actions-from-json)
- [Create actions entirely in code](#create-actions-in-code)
### Declare actions in MonoBehaviours
You can declare individual [`InputAction`](xref:UnityEngine.InputSystem.InputAction) and [`InputActionMap`](xref:UnityEngine.InputSystem.InputActionMap) objects as fields directly inside `MonoBehaviour` components.
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;
public class ExampleScript : MonoBehaviour
{
public InputAction move;
public InputAction jump;
}
```
The result is similar to using an action defined in the Input Actions Editor, except that you define the actions in the GameObject's properties and save them as scene or prefab data, instead of in a dedicated asset.
When you define serialized `InputAction` fields in a `MonoBehaviour` component to embed actions, the GameObject's Inspector window displays a script component similar to the "Actions" panel of the [Input Actions Editor](xref:input-system-configuring-input):
![The Move and Jump actions appear under the ExampleScript component with icons for editing, adding, and removing each action.](Images/Workflow-EmbeddedActionsInspector.png)<br/>
This interface allows you to set up the bindings for those actions. For example:
* To add or remove actions or bindings, select the Add (+) or Remove (-) icon on the action.
* To edit actions, select the gear icon on individual action properties.
* To edit bindings, double-click them.
* To open the context menu, right-click an entry.
* To duplicate an entry, hold the Alt key while dragging it.
Unlike the project-wide actions in the Project Settings window, you must manually enable and disable actions and action maps that are embedded in MonoBehaviour components.
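A common pattern is to enable and disable the embedded actions together with the component, as in this sketch (it assumes the `move` and `jump` fields from the example above):
```CSharp
void OnEnable()
{
    move.Enable();
    jump.Enable();
}

void OnDisable()
{
    move.Disable();
    jump.Disable();
}
```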
When you use this workflow, the serialized action configurations are stored with the parent GameObject as part of the scene, instead of being serialized with an action asset. This can be useful if you want to bundle the control bindings and behavior together in a single MonoBehaviour or prefab, so it can be distributed together. However, this can also make it harder to organize your full set of control bindings if they are distributed across multiple prefabs or scenes.
### Load actions from JSON
You can load actions as JSON in the form of a set of action maps or as a full [`InputActionAsset`](xref:UnityEngine.InputSystem.InputActionAsset). This also works at runtime in the Player.
```CSharp
// Load a set of action maps from JSON.
var maps = InputActionMap.FromJson(json);
// Load an entire InputActionAsset from JSON.
var asset = InputActionAsset.FromJson(json);
```
### Create actions in code
You can manually create and configure actions entirely in code, including assigning the bindings. This also works at runtime in the Player. For example:
```CSharp
// Create free-standing actions.
var lookAction = new InputAction("look", binding: "<Gamepad>/leftStick");
var moveAction = new InputAction("move", binding: "<Gamepad>/rightStick");

lookAction.AddBinding("<Mouse>/delta");
moveAction.AddCompositeBinding("Dpad")
    .With("Up", "<Keyboard>/w")
    .With("Down", "<Keyboard>/s")
    .With("Left", "<Keyboard>/a")
    .With("Right", "<Keyboard>/d");

// Create an action map with actions.
var map = new InputActionMap("Gameplay");
var mapLookAction = map.AddAction("look");
mapLookAction.AddBinding("<Gamepad>/leftStick");

// Create an action asset.
var asset = ScriptableObject.CreateInstance<InputActionAsset>();
var gameplayMap = new InputActionMap("gameplay");
asset.AddActionMap(gameplayMap);
var assetLookAction = gameplayMap.AddAction("look", binding: "<Gamepad>/leftStick");
```
Any action that you create in this way during Play mode doesn't persist in the input action asset after you exit Play mode. This means you can test your application in a realistic manner in the Editor without having to worry about inadvertently modifying the asset.
## Enable actions
Actions have an **enabled** state, meaning you can enable or disable them to suit different situations.
If you have an action asset assigned as [project-wide](xref:project-wide-actions), the actions it contains are enabled by default and ready to use.
Actions defined elsewhere, such as in an action asset not assigned as project-wide, or defined in your own code, begin in a disabled state, and you must enable them before they will respond to input.
You can enable actions individually, or as a group by enabling the action map which contains them.
```CSharp
// Enable a single action.
lookAction.Enable();
// Enable an entire action map.
gameplayActions.Enable();
```
When you enable an action, the Input System resolves its bindings, unless it has already done so and the set of devices that the action can use has not changed. For more details about this process, refer to the documentation on [binding resolution](xref:input-system-action-bindings#binding-resolution).
You can't change certain aspects of the configuration, such as action bindings, while an action is enabled. To stop actions or action maps from responding to input, call [`Disable`](xref:UnityEngine.InputSystem.InputAction.Disable).
While enabled, an action actively monitors the [controls](xref:input-system-controls) it's bound to. If a bound control changes state, the action processes the change. If the control's change represents an [interaction](xref:input-system-interactions) change, the action creates a response. All of this happens during the Input System update logic. Depending on the [update mode](xref:input-system-settings#update-mode) selected in the input settings, this happens once every frame, once every fixed update, or manually if updates are set to manual.
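For example, once an action is enabled you can either react to its callbacks or poll its current value (this sketch assumes `lookAction` is an enabled Value action bound to a `Vector2` control such as `<Gamepad>/leftStick`):
```CSharp
// React via a callback when the action is performed...
lookAction.performed += ctx => Debug.Log(ctx.ReadValue<Vector2>());

// ...or poll the current value, for example from a MonoBehaviour Update().
Vector2 look = lookAction.ReadValue<Vector2>();
```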
View File
@@ -0,0 +1,139 @@
---
uid: input-system-configuring-input
---
# Configuring input
The **Input Actions Editor** allows you to edit [action assets](xref:input-system-action-assets), which contain a saved configuration of [input actions](xref:input-system-actions) and their associated [bindings](xref:input-system-action-bindings).
It allows you to group collections of actions into [action maps](ActionsEditor.html#configure-action-maps), which represent different input scenarios in your project (such as UI navigation, gameplay, etc.)
It also allows you to define [control schemes](xref:input-system-action-bindings#control-schemes) which are a way to enable or disable a set of devices, or respond to which type of device is being used. This is often useful if you want to customize your UI based on whether your users are using a mouse, keyboard, or gamepad as their chosen input.
## Action assets and project-wide actions
The typical workflow for most projects is to have a single action asset, which is assigned as the **project-wide actions**. Refer to [Project-Wide Actions](xref:project-wide-actions) to create and assign an actions asset as your project-wide action if you haven't already done this.
## The Input Actions Editor window and panels
The **Input Actions Editor** appears when you double-click an action asset to open it.
It also appears in the Project Settings window under **Edit** > **Project Settings** > **Input System Package** if you have an action asset assigned as project-wide.
![The Input Actions Editor displays the three panels and the default actions](./Images/ActionsEditorCallout.png)
The Input Actions Editor is divided into three panels (marked A, B & C in the image above).
|Panel name|Description|
|-|-|
|**(A)&nbsp;Action Maps**|Displays the list of currently defined action maps. Each action map is a collection of actions that you can enable or disable together as a group.|
|**(B)&nbsp;Actions**|Displays all the actions defined in the currently selected action map, and the bindings associated with each action.|
|**(C)&nbsp;Properties**|Displays the properties of the currently selected action or binding from the Actions panel. The title of this panel changes depending on whether you have an action or a binding selected in the Actions panel.|
## Configure action maps
* To add a new action map, select the Add (+) icon in the header of the __Action Maps__ panel.
* To rename an existing action map, either long-click the name, or right-click the action map and select __Rename__ from the context menu. Note that action map names can't contain slashes (`/`).
* To delete an existing action map, right-click it and select __Delete__ from the context menu.
* To duplicate an existing action map, right-click it and select __Duplicate__ from the context menu.
## Configure actions
* To add a new action, select the Add (+) icon in the header of the __Action__ column.
* To rename an existing action, either long-click the name, or right-click the action and select __Rename__ from the context menu.
* To delete an existing action, right-click it and select __Delete__ from the context menu.
* To duplicate an existing action, right-click it and select __Duplicate__ from the context menu.
## Edit action properties
If you select an action, you can edit its properties in the __Action Properties__ panel on the right:
![The Action Properties panel of the Input Actions Editor displays the Action, Interactions, and Processors groups expanded.](Images/ActionProperties.png)
### Action Type
Use the __Action Type__ setting to select **Button**, **Value** or **PassThrough**.
These options relate to whether this action should represent a discrete on/off button-style interaction or a value that can change over time while the control is being used.
For device controls such as keyboard keys, mouse clicks, or gamepad buttons, select **Button**. For device controls such as mouse movement, a joystick or gamepad stick, or device orientation that provide continuously changing input over a period of time, select **Value**.
The Button and Value action types also provide data about the action, such as whether it has started and stopped, and perform conflict resolution in situations where multiple bindings are mapped to the same action.
The third option, **PassThrough**, is also a value type, and as such is suitable for the same kinds of device controls as Value. The difference is that actions set to PassThrough only provide basic information about the values coming in from the device controls bound to them; they don't provide the extra data relating to the phase of the action, nor do they perform conflict resolution when multiple controls are mapped to the same action.
For details about how these types work, refer to [Action types](xref:input-system-responding#action-types) and [Default Interaction](xref:input-system-interactions#default-interaction).
### Control Type
The __Control Type__ setting allows you to select the type of control expected by the action. This limits the controls shown when setting up bindings in the UI and also limits which controls can be bound interactively to the action.
For example, if you select **2D axis**, only those controls that can supply a 2D vector as value are available as options for the binding control path.
There are more specific control types available which further filter the available bindings, such as "Stick", "Dpad" or "Touch". If you select one of these control types, the list of available controls is limited to controls of that specific type when you choose a binding for your action (see below).
## Bindings
* To add a new binding, select the Add (+) icon on the action you want to add it to, and select the binding type from the menu that appears.
* To delete an existing binding, right-click it and select __Delete__ from the context menu.
* To duplicate an existing binding, right-click it and select __Duplicate__ from the context menu.
You can add multiple bindings to an action, which is generally useful for supporting multiple types of input device. For example, in the default set of actions, the "Move" action has a binding to the left gamepad stick and the WSAD keys, which means input through any of these bindings will perform the action.
![](./Images/ActionWithMultipleBindings.png)<br/>
_The default Move action in the Input Actions Editor, displaying the multiple bindings associated with it._
If you select a binding, you can edit its properties in the __Binding Properties__ panel on the right:
![The Binding Properties panel displays the Path value as Left Stick [Gamepad].](Images/BindingProperties.png)
### Set control paths
The most important property of any binding is the [control path](xref:input-system-controls#control-paths) it's bound to. To edit it, open the __Path__ dropdown menu. This displays a control picker window.
![The Binding Properties panel displays the control picker window available from the Path dropdown menu.](Images/InputControlPicker.png)
In the control picker window, you can explore a tree of input devices and controls that the Input System recognizes, and bind to these controls. Unity filters this list by the action's [`expectedControlType`](xref:UnityEngine.InputSystem.InputAction.expectedControlType) property. For example, if the control type is `Vector2`, you can only select a control that generates two-dimensional values, like a stick.
The device and control tree is organized hierarchically from generic to specific. For example, the __Gamepad__ control path `<Gamepad>/buttonSouth` matches the lower action button on any gamepad. Alternatively, if you navigate to __Gamepad__ > __More Specific Gamepads__ and select __PS4 Controller__, and then choose the control path `<DualShockGamepad>/buttonSouth`, this only matches the "Cross" button on PlayStation gamepads, and doesn't match any other gamepads.
Instead of browsing the tree to find the control you want, it's easier to let the Input System listen for input. To do that, select the __Listen__ button. At first, the list of Controls is empty. Once you start pressing buttons or actuating Controls on the Devices you want to bind to, the control picker window starts listing any bindings that match the controls you pressed. Select any of these bindings to view them.
Finally, you can choose to manually edit the binding path, instead of using the control picker. To do that, select the __T__ button next to the control path popup. This changes the popup to a text field, where you can enter any binding string. This also allows you to use wildcard (`*`) characters in your bindings. For example, you can use a binding path such as `<Touchscreen>/touch*/press` to bind to any finger being pressed on the touchscreen, instead of manually binding to `<Touchscreen>/touch0/press`, `<Touchscreen>/touch1/press` and so on.
### Edit composite bindings
Composite bindings are bindings consisting of multiple parts, which form a control together. For instance, a [2D Vector Composite](xref:input-system-action-bindings#2d-vector) uses four buttons (left, right, up, down) to simulate a 2D stick input. Refer to [Composite bindings](xref:input-system-action-bindings#composite-bindings) to learn more.
![The WASD setting appears under the Move property on the Actions panel.](Images/2DVectorComposite.png){width="486" height="178"}
To create a composite binding, in the Input Actions Editor, select the Add (+) icon on the action you want to add it to, and select the composite binding type from the popup menu.
![The Add Up/Down/Left/Right Composite binding is selected for the Move property on the Actions panel.](Images/Add2DVectorComposite.png){width="486" height="199"}
This creates multiple binding entries for the action: one for the Composite as a whole, and then, one level below that, one for each Composite part. The Composite itself doesn't have a binding path property, but its individual parts do, and you can edit these parts like any other binding. Once you bind all the Composite's parts, the Composite can work together as if you bound a single control to the action.
> [!NOTE]
> The set of Composites displayed in the menu depends on the value type of the action. This means that, for example, if the action is set to type "Button", only Composites able to return values of type `float` will be shown.
To change the type of a Composite retroactively, select the Composite, then select the new type from the **Composite Type** drop-down in the **Properties** pane.
![The Composite Type binding is set to 2D Vector binding on the Actions panel.](./Images/CompositeType.png){width="486" height="184"}
To change the part of the Composite to which a particular binding is assigned, use the **Composite Part** drop-down in the binding's properties.
![The Composite Part binding is set to Up under the Path binding property.](./Images/CompositePart.png){width="486" height="161"}
You can assign multiple bindings to the same part. You can also duplicate individual part bindings: right-click the binding, then select **Duplicate** to create new part bindings for the Composite. This can be used, for example, to create a single Composite for both "WASD" style controls and arrow keys.
![The Keyboard setting under Move on the Actions panel displays duplicated part bindings.](./Images/DuplicatedPartBindings.png){width="486" height="214"}
## Edit control schemes
Input action assets can have multiple [control schemes](xref:input-system-action-bindings#control-schemes), which let you enable or disable different sets of bindings for your actions for different types of Devices.
![Gamepad appears as the Scheme Name value on the Add Control Scheme window.](Images/ControlSchemeProperties.png)
To see the control schemes in the Input Actions Editor, open the control scheme drop-down list in the top left of the window. This menu lets you add or remove control schemes to your actions asset. If the actions asset contains any control schemes, you can select a control scheme, and then the window only shows bindings that are associated with that scheme. If you select a binding, you can pick the control schemes for which this binding is active in the __Properties__ panel on the right.
When you add a new control scheme, or select an existing control scheme, and then select __Edit Control Scheme__, you can edit the name of the control scheme and which devices the scheme should be active for. When you add a new control scheme, the "Device Type" list is empty by default (as shown above). You must add at least one type of device to this list for the control scheme to be functional.
View File
@@ -0,0 +1,34 @@
---
uid: input-system-architecture
---
# Architecture
The Input System has a layered architecture that consists of a low-level layer and a high-level layer.
## Native backend
The foundation of the Input System is the native backend code. This is platform-specific code which collects information about available Devices and input data from Devices. This code is not part of the Input System package, but is included with Unity itself. It has implementations for each runtime platform supported by Unity. This is why some platform-specific input bugs can only be fixed by an update to Unity, rather than a new version of the Input System package.
The Input System interfaces with the native backend using [events](xref:input-system-events) that the native backend sends. These events notify the system of the creation and removal of [Input Devices](xref:input-system-devices), as well as any updates to the Device states. For efficiency and to avoid creating any garbage, the native backend reports these events as a simple buffer of raw, unmanaged memory containing a stream of events.
The Input System can also send data back to the native backend in the form of [commands](xref:input-system-devices#device-commands) sent to Devices, which are also buffers of memory that the native backend interprets. These commands can have different meanings for different Device types and platforms.
## Input System (low-level)
![Low-Level Architecture](Images/InputArchitectureLowLevel.png)
The low-level Input System code processes and interprets the memory from the event stream that the native backend provides, and dispatches individual events.
The Input System creates Device representations for any newly discovered Device in the event stream. The low-level code sees a Device as a block of raw, unmanaged memory. If it receives a state event for a Device, it writes the data from the state event into the Device's [state representation](xref:input-system-controls#control-state) in memory, so that the state always contains an up-to-date representation of the Device and all its Controls.
The low-level system code also contains structs which describe the data layout of commonly known Devices.
## Input System (high-level)
![High-Level Architecture](Images/InputArchitectureHighLevel.png)
The high-level Input System code interprets the data in a Device's state buffers by using [layouts](xref:input-system-layouts), which describe the data layout of a Device and its Controls in memory. The Input System creates layouts from either the pre-defined structs of commonly known Devices supplied by the low level system, or dynamically at runtime, as in the case of [generic HIDs](xref:input-system-hid#auto-generated-layouts).
Based on the information in the layouts, the Input System then creates [Control](xref:input-system-controls) representations for each of the Device's controls, which let you read the state of each individual Control in a Device.
As part of the high-level system, you can also build another abstraction layer to map Input Controls to your application mechanics. Use [Actions](xref:input-system-actions) to [bind](xref:input-system-action-bindings) one or more Controls to an input in your application. The Input System then monitors these Controls for state changes, and notifies your game logic using [callbacks](xref:input-system-responding#responding-to-actions-using-callbacks). You can also specify more complex behaviors for your Actions using [Processors](UsingProcessors.md) (which perform processing on the input data before sending it to you) and [Interactions](xref:input-system-interactions) (which let you specify patterns of input on a Control to listen to, such as multi-taps).
View File
@@ -0,0 +1,19 @@
---
uid: basic-concepts
---
# Basic Concepts
This page introduces the basic concepts that relate to working with the Input System. They relate to the steps in the sequence of events that occur when a user sends input to your game or app. The Input System provides features which implement these steps, or you can choose to implement some of them yourself.
![A flowchart showing the general workflow of the Input System, with icons representing the different concepts. It starts with the User icon, which then leads into the Input Device and its Controls icon. This then leads into the Action Map and Actions concept. The Input Device and Action Map and Actions icons are collectively grouped under the Binding header. This leads into the final icon representing your action code.](Images/ConceptsOverview.png)
|Concept|Description|
|-------|-----------|
|[**User**](UserManagement.html)| The person playing your game or using your app, by holding or touching the input device and providing input.|
|[**Input Device**](SupportedDevices.html)| Often referred to just as a "**device**" within the context of input. A physical piece of hardware, such as a keyboard, gamepad, mouse, or touchscreen which allows the user to send input into Unity.|
|[**Control**](Controls.html)|The separate individual parts of an input device which each send input values into Unity. For example, a gamepad's **controls** comprise multiple buttons, sticks and triggers, and a mouse's controls include the two X and Y sensors on the underside, and the various buttons and scroll wheels on the top side.|
|[**Action**](Actions.html)| Actions are a high-level concept that describe individual things that a user might want to do in your game or app, such as "Jump" within a game, or "Select" in an on-screen UI. They are things a user can do in your game or app as a result of input, regardless of what device or control they use to perform it. Actions generally have conceptual names that you choose to suit your project, and should usually be verbs. For example "Run", "Jump" "Crouch", "Use", "Start", "Quit".|
|[**Action Map**](ActionsEditor.html#configure-action-maps) | Action Maps allow you to organize Actions into groups which represent specific situations where a set of actions make sense together. You can simultaneously enable or disable all Actions in an action map, so it is useful to group Actions in Action Maps by the context in which they are relevant. For example, you might have one action map for controlling a player, and another for interacting with your game's UI.|
|[**Binding**](ActionBindings.html)| A connection defined between an **Action** and specific device controls. There are two main types of bindings:<ul><li>**Normal** bindings directly bind to control(s) by means of a [control path](xref:input-system-controls#control-paths). At runtime, any path that matches one or multiple controls will feed input into the binding.</li><li>**Composite** bindings don't bind to controls themselves. Instead, they receive their input from their **Part** bindings and then return a value representing a composition of those inputs. For example, the right trigger on the gamepad can act as a strength multiplier on the value of the left stick.</li></ul> |
|[**Your Action Code**](xref:input-system-responding)| The part of your script which is executed based on the actions you have configured. In your code, you can use references to actions to either read the current value or state of the action (also known as "polling"), or set up a callback to call your own method when actions are performed.|
|[**Action Asset**](xref:input-system-action-assets) | An asset type which contains a saved configuration of Action Maps, Actions and Bindings. You can specify one Action Asset in your project as the [project-wide actions](xref:project-wide-actions), which allows you to easily reference those actions in code by using [`InputSystem.actions`](xref:UnityEngine.InputSystem.InputSystem). |
View File
@@ -0,0 +1,19 @@
---
uid: input-system-contributing
---
# Contributing
The [full source code](https://github.com/Unity-Technologies/InputSystem) for the Input System is available on GitHub. This is also where most of the Input System's development happens.
> [!NOTE]
> This includes the full source code for the managed/C# part of the system. At this point, the native, platform-specific C++ backends are still closed-source and require a source code license.
## Reporting bugs
To report documentation problems, please use the feedback section at the bottom of the page containing the problem.
To report bugs related to the Input System, please follow Unity's standard [bug reporting guidelines](https://unity3d.com/unity/qa/bug-reporting). Don't forget to submit a project that the developer who picks up your report can use to reproduce the issue. Be sure to mention that the bug is specific to the Input System package in the description, so it gets forwarded to the correct team at Unity.
## Discussion
To ask questions or discuss the Input System, see the [dedicated section on Unity's forum](https://forum.unity.com/forums/new-input-system.103/). This is also the best place to post feature requests.
View File
@@ -0,0 +1,312 @@
---
uid: input-system-controls
---
# Controls
An input control represents a source of values. These values can be of any structured or primitive type. The only requirement is that the type is [blittable](https://docs.microsoft.com/en-us/dotnet/framework/interop/blittable-and-non-blittable-types).
> [!NOTE]
> Controls are for input only. Output and configuration items on input devices are not represented as controls.
## Identification
Each control is identified by its [name](xref:UnityEngine.InputSystem.InputControl.name). Optionally, it can also have a different [display name](xref:UnityEngine.InputSystem.InputControl.displayName). For example, the right-hand face button closest to the touchpad on a PlayStation DualShock 4 controller has the control name "buttonWest" and the display name "Square".
Additionally, a control might have one or more aliases which provide alternative names for the control. You can access the aliases for a specific control through its [`aliases`](xref:UnityEngine.InputSystem.InputControl.aliases) property.
Finally, a control might also have a short display name which can be accessed through the [`shortDisplayName`](xref:UnityEngine.InputSystem.InputControl.shortDisplayName) property. For example, the short display name for the left mouse button is "LMB".
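For example, a minimal sketch of reading these identifiers for the left mouse button (the display names in the comments can vary by platform):
```CSharp
var button = Mouse.current.leftButton;
Debug.Log(button.name);             // "leftButton"
Debug.Log(button.displayName);      // For example "Left Button".
Debug.Log(button.shortDisplayName); // For example "LMB".
```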
## Control hierarchies
Controls can form hierarchies. The root of a control hierarchy is always a [device](xref:input-system-devices).
The setup of hierarchies is exclusively controlled through [layouts](xref:input-system-layouts).
You can access the parent of a control using its [`parent`](xref:UnityEngine.InputSystem.InputControl.parent) property, and its children using [`children`](xref:UnityEngine.InputSystem.InputControl.children). To access the flattened hierarchy of all controls on a device, use [`allControls`](xref:UnityEngine.InputSystem.InputDevice.allControls).
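For example, a minimal sketch that walks the flattened control hierarchy of the current gamepad (it assumes a gamepad is connected):
```CSharp
var gamepad = Gamepad.current;
foreach (var control in gamepad.allControls)
    Debug.Log($"{control.name} (parent: {control.parent.name})");
```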
## Control types
All controls are based on the [`InputControl`](xref:UnityEngine.InputSystem.InputControl) base class. Most concrete implementations are based on [InputControl<TValue>](xref:UnityEngine.InputSystem.InputControl`1).
The Input System provides the following types of controls out of the box:
|Control Type|Description|Example|
|------------|-----------|-------|
|[`AxisControl`](xref:UnityEngine.InputSystem.Controls.AxisControl)|A 1D floating-point axis.|[`Gamepad.leftStick.x`](xref:UnityEngine.InputSystem.Controls.Vector2Control.x)|
|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|A button expressed as a floating-point value. Whether the button can have a value other than 0 or 1 depends on the underlying representation. For example, gamepad trigger buttons can have values other than 0 and 1, but gamepad face buttons generally can't.|[`Mouse.leftButton`](xref:UnityEngine.InputSystem.Mouse.leftButton)|
|[`KeyControl`](xref:UnityEngine.InputSystem.Controls.KeyControl)|A specialized button that represents a key on a [`Keyboard`](xref:UnityEngine.InputSystem.Keyboard). Keys have an associated [`keyCode`](xref:UnityEngine.InputSystem.Controls.KeyControl.keyCode) and, unlike other types of controls, change their display name in accordance to the currently active system-wide keyboard layout. See the [Keyboard](xref:input-system-keyboard) documentation for details.|[`Keyboard.aKey`](xref:UnityEngine.InputSystem.Keyboard.aKey)|
|[`Vector2Control`](xref:UnityEngine.InputSystem.Controls.Vector2Control)|A 2D floating-point vector.|[`Pointer.position`](xref:UnityEngine.InputSystem.Pointer.position)|
|[`Vector3Control`](xref:UnityEngine.InputSystem.Controls.Vector3Control)|A 3D floating-point vector.|[`Accelerometer.acceleration`](xref:UnityEngine.InputSystem.Accelerometer.acceleration)|
|[`QuaternionControl`](xref:UnityEngine.InputSystem.Controls.QuaternionControl)|A 3D rotation.|[`AttitudeSensor.attitude`](xref:UnityEngine.InputSystem.AttitudeSensor.attitude)|
|[`IntegerControl`](xref:UnityEngine.InputSystem.Controls.IntegerControl)|An integer value.|[`Touchscreen.primaryTouch.touchId`](xref:UnityEngine.InputSystem.Controls.TouchControl.touchId)|
|[`StickControl`](xref:UnityEngine.InputSystem.Controls.StickControl)|A 2D stick control like the thumbsticks on gamepads or the stick control of a joystick.|[`Gamepad.rightStick`](xref:UnityEngine.InputSystem.Gamepad.rightStick)|
|[`DpadControl`](xref:UnityEngine.InputSystem.Controls.DpadControl)|A 4-way button control like the D-pad on gamepads or hatswitches on joysticks.|[`Gamepad.dpad`](xref:UnityEngine.InputSystem.Gamepad.dpad)|
|[`TouchControl`](xref:UnityEngine.InputSystem.Controls.TouchControl)|A control that represents all the properties of a touch on a [touch screen](xref:input-system-touch).|[`Touchscreen.primaryTouch`](xref:UnityEngine.InputSystem.Touchscreen.primaryTouch)|
You can browse the set of all registered control layouts in the [input debugger](xref:input-system-debugging#debugging-layouts).
## Control usages
A control can have one or more associated usages. A usage is a string that denotes the control's intended use. An example of a control usage is `Submit`, which labels a control that is commonly used to confirm a selection in the UI. On a gamepad, this usage is commonly found on the `buttonSouth` control.
You can access a control's usages using the [`InputControl.usages`](xref:UnityEngine.InputSystem.InputControl.usages) property.
Usages can be arbitrary strings. However, a certain set of usages is very commonly used and comes predefined in the API in the [`CommonUsages`](xref:UnityEngine.InputSystem.CommonUsages) static class.
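For example, a minimal sketch that lists the usages on the south face button of the current gamepad:
```CSharp
// Sketch: print the usages associated with a control.
foreach (var usage in Gamepad.current.buttonSouth.usages)
    Debug.Log(usage); // e.g. "Submit"
```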
## Control paths
The Input System can look up controls using textual paths. [Bindings](xref:input-system-action-bindings) on Input Actions rely on this feature to identify the control(s) they read input from. For example, `<Gamepad>/leftStick/x` means "X control on left stick of gamepad". However, you can also use them for lookup directly on controls and devices, or to let the Input System search for controls among all devices using [`InputSystem.FindControls`](xref:UnityEngine.InputSystem.InputSystem.FindControls(System.String)):
```CSharp
var gamepad = Gamepad.all[0];
var leftStickX = gamepad["leftStick/x"];
var submitButton = gamepad["{Submit}"];
var allSubmitButtons = InputSystem.FindControls("*/{Submit}");
```
Control paths resemble file system paths: they contain components separated by a forward slash (`/`):
```
component/component...
```
Each component itself contains a set of [fields](#component-fields) with its own syntax. Each field is individually optional, provided that at least one of the fields is present as either a name or a wildcard:
```structured text
<layoutName>{usageName}#(displayName)controlName
```
You can access the literal path of a given control via its [`InputControl.path`](xref:UnityEngine.InputSystem.InputControl.path) property. If you need to, you can manually parse a control path into its components using the [`InputControlPath.Parse(path)`](xref:UnityEngine.InputSystem.InputControlPath.Parse(System.String)) API:
```CSharp
var parsed = InputControlPath.Parse("<XRController>{LeftHand}/trigger").ToArray();
Debug.Log(parsed.Length); // Prints 2.
Debug.Log(parsed[0].layout); // Prints "XRController".
Debug.Log(parsed[0].name); // Prints an empty string.
Debug.Log(parsed[0].usages.First()); // Prints "LeftHand".
Debug.Log(parsed[1].layout); // Prints null.
Debug.Log(parsed[1].name); // Prints "trigger".
```
### Component fields
All fields are case-insensitive.
The following table explains the use of each field:
|Field|Description|Related links|
|-----|-----|------------------|
|`layoutName`|The name of the layout that the control must be based on. The actual layout of the control may be the same or a layout *based* on the given layout. For example, `<Gamepad>`.|The [Layouts](xref:input-system-layouts) user manual topic<br/><br/>The [InputControlLayout](xref:UnityEngine.InputSystem.Layouts.InputControlLayout) class|
|`usageName`|Works differently for controls and devices:<ul><li>When used on a device (the first component of a path), it requires the device to have the given usage. For example, `<XRController>{LeftHand}/trigger`.</li><li>For looking up a control, the usage field is currently restricted to the path component immediately following the device (the second component in the path). It finds the control on the device that has the given usage. The control can be anywhere in the control hierarchy of the device. For example, `<Gamepad>/{Submit}`.</li></ul>|The [Device usages](xref:input-system-devices#device-usages) user manual topic<br/><br/>The [Control usages](#control-usages) topic on this page<br/><br/>The [InputControl.usages](xref:UnityEngine.InputSystem.InputControl.usages) property|
|`displayName`|Requires the control at the current level to have the given display name. The display name may contain whitespace and symbols. For example:<ul><li>`<Keyboard>/#(a)` matches the key that generates the "a" character, if any, according to the current keyboard layout. </li><li>`<Gamepad>/#(Cross)` matches the button named "Cross" on the Gamepad.</li></ul>|The [Identification](#identification) topic on this page<br/><br/>The [InputControl.displayName](xref:UnityEngine.InputSystem.InputControl.displayName) property|
|`controlName`|Requires the control at the current level to have the given name. Takes both "proper" names such as `MyGamepad/buttonSouth`, and aliases such as `MyGamepad/South` into account.<br><br>This field can also be a wildcard (`*`) to match any name. For example, `*/{PrimaryAction}` matches any `PrimaryAction` usage on devices with any name.|The [Identification](#identification) topic on this page<br/><br/>The [InputControl.name](xref:UnityEngine.InputSystem.InputControl.name) property for "proper" names<br/><br/>The [InputControl.aliases](xref:UnityEngine.InputSystem.InputControl.aliases) property for aliases|
Here are several examples of control paths:
```csharp
// Matches all gamepads (also gamepads *based* on the Gamepad layout):
"<Gamepad>"
// Matches the "Submit" control on all devices:
"*/"
// Matches the key that prints the "a" character on the current keyboard layout:
"<Keyboard>/#(a)"
// Matches the X axis of the left stick on a gamepad:
"<Gamepad>/leftStick/x"
// Matches the orientation control of the right-hand XR controller:
"<XRController>{RightHand}/orientation"
// Matches all buttons on a gamepad:
"<Gamepad>/<Button>"
```
## Control state
Each control is connected to a block of memory that is considered the control's "state". You can query the size, format, and location of this block of memory from a control through the [`InputControl.stateBlock`](xref:UnityEngine.InputSystem.InputControl.stateBlock) property.
The Input System stores the state of controls in unmanaged memory that it handles internally. All devices added to the system share one block of unmanaged memory that contains the state of all the controls on the devices.
A control's state might not be stored in the natural format for that control. For example, the system often represents buttons as bitfields, and axis controls as 8-bit or 16-bit integer values. This format is determined by the combination of platform, hardware, and drivers. Each control knows the format of its storage and how to translate the values as needed. The Input System uses [layouts](xref:input-system-layouts) to understand this representation.
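For example, a minimal sketch that inspects where and how a control's state is stored:
```CSharp
// Sketch: inspect the state block of a control. The format depends on
// platform, hardware, and drivers, so the output varies between devices.
var stateBlock = Gamepad.current.leftTrigger.stateBlock;
Debug.Log(stateBlock.format);     // Memory format, e.g. a byte or float representation.
Debug.Log(stateBlock.byteOffset); // Offset of the control's state within the device's state.
Debug.Log(stateBlock.sizeInBits); // Size of the control's state in bits.
```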
You can access the current state of a control through its [`ReadValue`](xref:UnityEngine.InputSystem.InputControl`1.ReadValue) method.
```CSharp
Gamepad.current.leftStick.x.ReadValue();
```
Each type of control has a specific type of values that it returns, regardless of how many different types of formats it supports for its state. You can access this value type through the [`InputControl.valueType`](xref:UnityEngine.InputSystem.InputControl.valueType) property.
Reading a value from a control might apply one or more value Processors. Refer to the documentation on [Processors](UsingProcessors.md) for more information.
[//]: # (#### Default State - TODO)
[//]: # (#### Reading State vs Reading Values - TODO)
#### Recording state history
If you want to access the history of value changes on a control (for example, in order to compute exit velocity on a touch release), you can record state changes over time with [`InputStateHistory`](xref:UnityEngine.InputSystem.LowLevel.InputStateHistory) or [`InputStateHistory<TValue>`](xref:UnityEngine.InputSystem.LowLevel.InputStateHistory`1). The latter restricts controls to those of a specific value type, which in turn simplifies some of the API.
```CSharp
// Create history that records Vector2 control value changes.
// NOTE: You can also pass controls directly or use paths that match multiple
// controls (e.g. "<Gamepad>/<Button>").
// NOTE: The unconstrained InputStateHistory class can record changes on controls
// of different value types.
var history = new InputStateHistory<Vector2>("<Touchscreen>/primaryTouch/position");
// To start recording state changes of the controls to which the history
// is attached, call StartRecording.
history.StartRecording();
// To stop recording state changes, call StopRecording.
history.StopRecording();
// Recorded history can be accessed like an array.
for (var i = 0; i < history.Count; ++i)
{
    // Each recorded value provides information about which control changed
    // value (in case state from multiple controls is recorded concurrently
    // by the same InputStateHistory) and when it did so.
    var time = history[i].time;
    var control = history[i].control;
    var value = history[i].ReadValue();
}
// Recorded history can also be iterated over.
foreach (var record in history)
    Debug.Log(record.ReadValue());
Debug.Log(string.Join(",\n", history));
// You can also record state changes manually, which allows
// storing arbitrary histories in InputStateHistory.
// NOTE: This records a value change that didn't actually happen on the control.
history.RecordStateChange(Touchscreen.current.primaryTouch.position,
    new Vector2(0.123f, 0.234f));
// State histories allocate unmanaged memory and need to be disposed.
history.Dispose();
```
For example, if you want to have the last 100 samples of the left stick on the gamepad available, you can use this code:
```CSharp
var history = new InputStateHistory<Vector2>(Gamepad.current.leftStick);
history.historyDepth = 100;
history.StartRecording();
```
## Control actuation
A control is considered "actuated" when it has moved away from its default state in such a way that it affects the actual value of the control. Use [`IsActuated`](xref:UnityEngine.InputSystem.InputControlExtensions.IsActuated(UnityEngine.InputSystem.InputControl,System.Single)) to query whether a control is currently actuated.
```CSharp
// Check if leftStick is currently actuated.
if (Gamepad.current.leftStick.IsActuated())
    Debug.Log("Left Stick is actuated");
```
It can be useful to determine not just whether a control is actuated at all, but also the amount by which it is actuated (that is, its magnitude). For example, for a [`Vector2Control`](xref:UnityEngine.InputSystem.Controls.Vector2Control) this would be the length of the vector, whereas for a button it is the raw, absolute floating-point value.
In general, the current magnitude of a control is always >= 0. However, a control might not have a meaningful magnitude, in which case it returns -1. Any negative value should be considered an invalid magnitude.
You can query the current amount of actuation using [`EvaluateMagnitude`](xref:UnityEngine.InputSystem.InputControl.EvaluateMagnitude).
```CSharp
// Check if left stick is actuated more than a quarter of its motion range.
if (Gamepad.current.leftStick.EvaluateMagnitude() > 0.25f)
    Debug.Log("Left Stick actuated past 25%");
```
These two mechanisms use control actuation:
- [Interactive rebinding](xref:input-system-action-bindings#interactive-rebinding) (`InputActionRebindingExceptions.RebindOperation`) uses it to select between multiple suitable controls to find the one that is actuated the most.
- [Conflict resolution](xref:input-system-action-bindings#conflicting-inputs) between multiple controls that are bound to the same action uses it to decide which control gets to drive the action.
## Noisy controls
The Input System can label a control as "noisy", meaning that it can change value without any actual or intentional user interaction. Examples are a gravity [sensor](xref:UnityEngine.InputSystem.Sensor) in a cellphone or the orientation readings from an [XR head-mounted display](xref:UnityEngine.InputSystem.XR.XRHMD).
For example, the PS4 controller has a gyroscope sensor built into the device which constantly feeds data about the angular velocity of the device, even if the device just sits there without user interaction. Conversely, the controller's sticks and buttons require user interaction to produce non-default values.
If a control is marked as noisy, it means that:
- The control is not considered for [interactive rebinding](xref:input-system-action-bindings#interactive-rebinding). The [`InputActionRebindingExceptions.RebindingOperation`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation) ignores the control by default (although you can bypass this using [`WithoutIgnoringNoisyControls`](xref:UnityEngine.InputSystem.InputActionRebindingExtensions.RebindingOperation.WithoutIgnoringNoisyControls)).
- If enabled in the Project Settings, the system performs additional event filtering, then calls [`InputDevice.MakeCurrent`](xref:UnityEngine.InputSystem.InputDevice.MakeCurrent). If an input event for a device contains no state change on a control that is not marked noisy, then the device will not be made current based on the event. This avoids, for example, a plugged in PS4 controller constantly making itself the current gamepad ([`Gamepad.current`](xref:UnityEngine.InputSystem.Gamepad.current)) due to its sensors constantly feeding data into the system.
- When the application loses focus and devices are [reset](xref:input-system-devices#device-resets) as a result, noisy controls are excluded from the reset and keep their current value (unless the device is [running in the background](xref:input-system-devices#background-and-focus-change-behavior)). This is based on the assumption that noisy controls most often represent sensor values, and snapping the last sampled value back to its default would usually have undesirable effects on an application's simulation logic.
> [!NOTE]
> To query whether a control is noisy, use the [`InputControl.noisy`](xref:UnityEngine.InputSystem.InputControl.noisy) property.
>
> If any control on a device is noisy, the device itself is flagged as noisy.
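For example, a short sketch that lists the noisy controls on the current gamepad, if it has any:
```CSharp
// Sketch: find all noisy controls on the current gamepad.
foreach (var control in Gamepad.current.allControls)
{
    if (control.noisy)
        Debug.Log(control.path + " is noisy");
}
```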
### Noise masks
Parallel to the [`input state`](xref:UnityEngine.InputSystem.InputControl.currentStatePtr) and the [`default state`](xref:UnityEngine.InputSystem.InputControl.defaultStatePtr) that the Input System keeps for all devices currently present, it also maintains a [`noise mask`](xref:UnityEngine.InputSystem.InputControl.noiseMaskPtr) in which only bits for state that is __not__ noise are set. This can be used to very efficiently mask out noise in input.
## Synthetic controls
A synthetic control is an input control that doesn't correspond to an actual physical control on a device (for example the `left`, `right`, `up`, and `down` child controls on a [`StickControl`](xref:UnityEngine.InputSystem.Controls.StickControl)). These controls synthesize input from other, actual physical controls and present it in a different way (in this example, they allow you to treat the individual directions of a stick as buttons).
The system considers synthetic controls for [interactive rebinding](xref:input-system-action-bindings#interactive-rebinding) but always favors non-synthetic controls. If both a synthetic and a non-synthetic control that are a potential match exist (for example, `<Gamepad>/leftStick/x` and `<Gamepad>/leftStick/left`), the non-synthetic control (`<Gamepad>/leftStick/x`) wins by default. This makes it possible to interactively bind to `<Gamepad>/leftStick/left`, for example, but also makes it possible to bind to `<Gamepad>/leftStickPress` without getting interference from the synthetic buttons on the stick.
> [!NOTE]
> To query whether a control is synthetic, use the [`InputControl.synthetic`](xref:UnityEngine.InputSystem.InputControl.synthetic) property.
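For example, a minimal sketch that distinguishes a synthetic control from a non-synthetic one:
```CSharp
// Sketch: the per-direction "buttons" of a stick are synthetic,
// whereas the underlying axes are not.
Debug.Log(Gamepad.current.leftStick.left.synthetic); // true
Debug.Log(Gamepad.current.leftStick.x.synthetic);    // false
```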
## Performance Optimization
### Avoiding defensive copies
Use [`InputControl<T>.value`](xref:UnityEngine.InputSystem.InputControl`1.value) instead of [`InputControl<T>.ReadValue`](xref:UnityEngine.InputSystem.InputControl`1.ReadValue) to avoid creating a copy of the control state on every call. This is because `InputControl<T>.value` returns the value as `ref readonly` while `InputControl<T>.ReadValue` always makes a copy. Note that this optimization only applies if the call site assigns the return value to a variable that has been declared `ref readonly`. Otherwise, a copy is made as before.
Additionally, be aware that the compiler creates a defensive copy whenever it cannot prove that the read-only reference will not be modified. For more details, refer to the [.NET guidance on reducing memory allocations](https://learn.microsoft.com/en-us/dotnet/csharp/write-safe-efficient-code#use-ref-readonly-return-statements).
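A minimal sketch of the difference, assuming a gamepad is connected:
```CSharp
// Sketch: no copy is made because the local is declared ref readonly.
ref readonly Vector2 stickValue = ref Gamepad.current.leftStick.value;
Debug.Log(stickValue);

// A copy is made here because the return value is assigned to a plain local.
Vector2 stickCopy = Gamepad.current.leftStick.value;
Debug.Log(stickCopy);
```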
### Control Value Caching
Enable the `USE_READ_VALUE_CACHING` internal feature flag to get the Input System to switch to an optimized path for reading control values. This path efficiently marks controls as "stale" when they have been actuated. Subsequent calls to [`InputControl<T>.ReadValue`](xref:UnityEngine.InputSystem.InputControl`1.ReadValue) will only apply control processing when there have been changes to that control or in the case of any [hard-coded processing](xref:input-system-processors#processors-on-controls) that might exist on the control. For example, [`AxisControl`](xref:UnityEngine.InputSystem.Controls.AxisControl) has built-in inversion, normalization, scaling, and any other processors added to the controls' [processor stack](xref:input-system-processors#processors-on-controls).
> [!NOTE]
> Performance improvements **are currently not guaranteed** for all use cases. Even though this performance path marks controls as "stale" in an efficient way, it still has an overhead which can degrade performance in some cases.
Positive performance impact can occur when:
- Reading from controls that don't change frequently.
- Reading controls that change every frame and also have actions bound to them. For example, reading `leftStick`, `leftStick.x`, and `leftStick.left` when there's an action with composite bindings on a Gamepad.
Negative performance impact can occur when:
- Reading from controls that change frequently but which have no bound actions.
- Not reading at all from controls that change frequently (the overhead of tracking changes is paid without any benefit).
`USE_READ_VALUE_CACHING` is not enabled by default because it can result in the following minor behavioral changes:
- For control processors that use global state, changing that global state has no effect while cached value optimization is in use: reading the control only returns a new value once the physical control has been actuated again. Without the optimization, the processor runs on every read, so you could read the value, change the global state, read again, and get a different result.
- Writing to device state using low-level APIs like [`InputControl<T>.WriteValueIntoState`](xref:UnityEngine.InputSystem.InputControl`1.WriteValueIntoState(`0,System.Void*)) doesn't set the stale flag and subsequent calls to [`InputControl<T>.value`](xref:UnityEngine.InputSystem.InputControl`1.value) won't reflect those changes.
- After changing properties on [`AxisControl`](xref:UnityEngine.InputSystem.Controls.AxisControl) the [`ApplyParameterChanges`](xref:UnityEngine.InputSystem.InputControl.ApplyParameterChanges) method has to be called to invalidate cached values.
Processors that must run on every read can set their caching policy to [EvaluateOnEveryRead](xref:UnityEngine.InputSystem.InputProcessor.CachingPolicy.EvaluateOnEveryRead), which disables caching on controls that are using such processors.
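A minimal sketch of a custom processor that opts out of caching, assuming that `cachingPolicy` can be assigned from a derived processor's constructor (this is an illustrative, hypothetical processor, not part of the package):
```CSharp
// Hypothetical processor that must run on every read because its output
// is not a pure function of the control's state.
public class JitterProcessor : InputProcessor<float>
{
    public JitterProcessor()
    {
        // Assumption: cachingPolicy is settable from derived processors.
        cachingPolicy = CachingPolicy.EvaluateOnEveryRead;
    }

    public override float Process(float value, InputControl control)
    {
        return value + UnityEngine.Random.Range(-0.01f, 0.01f);
    }
}
```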
You can enable the `PARANOID_READ_VALUE_CACHING_CHECKS` internal feature flag to compare cached and uncached values on every read. If they don't match, the check logs an error.
### Optimized control read value
Enable the `USE_OPTIMIZED_CONTROLS` internal feature flag to get the Input System to access state memory faster for some control instances. This is a very specific optimization and should be used with caution.
> [!NOTE]
> This optimization has a performance impact on `PlayMode` because the Input System performs extra checks in order to ensure that the controls have the correct memory representation during development. If you see a performance drop in `PlayMode` when using this optimization, that is expected at this stage.
Most controls are flexible with regard to memory representation. For example, an [`AxisControl`](xref:UnityEngine.InputSystem.Controls.AxisControl) can be stored as a single bit, multiple bits, or a float, and the `x` and `y` components of a [`Vector2Control`](xref:UnityEngine.InputSystem.Controls.Vector2Control) can each use a different memory representation. However, most controls follow common representation patterns: an [`AxisControl`](xref:UnityEngine.InputSystem.Controls.AxisControl) is typically a float or a single byte, and a [`Vector2Control`](xref:UnityEngine.InputSystem.Controls.Vector2Control) typically consists of two consecutive floats in memory.
If a control matches a common representation, you can bypass reading its child controls and cast the state memory directly to that representation. For example, if a [`Vector2Control`](xref:UnityEngine.InputSystem.Controls.Vector2Control) is stored as two consecutive floats, you can skip reading `x` and `y` separately and cast the state memory to `Vector2`.
> [!NOTE]
> This optimization only works if the controls don't need any processing applied, such as `invert`, `clamp`, `normalize`, `scale` or any other processor. If any of these are applied to the control, the control will be read as usual without any optimization.
It is important to explicitly call [`InputControl.ApplyParameterChanges()`](xref:UnityEngine.InputSystem.InputControl.ApplyParameterChanges) to ensure [`InputControl.optimizedControlDataType`](xref:UnityEngine.InputSystem.InputControl.optimizedControlDataType) is updated to the correct memory representation for these specific changes:
- Configuration changes after calling [`InputControl.FinishSetup()`](xref:UnityEngine.InputSystem.InputControl.FinishSetup*).
- Changing parameters such as [`AxisControl.invert`](xref:UnityEngine.InputSystem.Controls.AxisControl.invert), [`AxisControl.clamp`](xref:UnityEngine.InputSystem.Controls.AxisControl.clamp), [`AxisControl.normalize`](xref:UnityEngine.InputSystem.Controls.AxisControl.normalize), [`AxisControl.scale`](xref:UnityEngine.InputSystem.Controls.AxisControl.scale) or changing processors. The memory representation needs to be recalculated after these changes to ensure that the control is no longer optimized. Otherwise, the control will be read with wrong values.
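For example, a minimal sketch of re-evaluating the memory representation after such a change:
```CSharp
// Sketch: after changing a parameter on a control, re-evaluate its
// memory representation so optimized/cached reads stay correct.
var stickX = Gamepad.current.leftStick.x;
stickX.invert = true;            // Parameter change; the old representation is no longer valid.
stickX.ApplyParameterChanges();  // Recalculates optimizedControlDataType and invalidates cached values.
```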
The optimized controls work as follows:
- A potential memory representation is set using [`InputControl.CalculateOptimizedControlDataType()`](xref:UnityEngine.InputSystem.InputControl.CalculateOptimizedControlDataType)
- Its memory representation is stored in [`InputControl.optimizedControlDataType`](xref:UnityEngine.InputSystem.InputControl.optimizedControlDataType)
- Finally, [`ReadUnprocessedValueFromState`](xref:UnityEngine.InputSystem.InputControl`1.ReadUnprocessedValueFromState*) uses the optimized memory representation to decide whether it can cast the state memory directly instead of reading every child control individually to reconstruct the control's state.
---
uid: input-system-debugging
---
# Debugging
When something isn't working as expected, the quickest way to troubleshoot what's wrong is the Input Debugger in the Unity Editor. The Input Debugger provides access to the activity of the Input System in both the Editor and the connected Players.
To open the Input Debugger, go to __Window > Analysis > Input Debugger__ from Unity's main menu.
## Input Debugger
![Input Debugger](Images/InputDebugger.png)
The Input Debugger displays a tree breakdown of the state of the Input System.
|Item|Description|
|----|-----------|
|Devices|A list of all [Input Devices](xref:input-system-devices) that are currently in the system, and a list of unsupported/unrecognized Devices.|
|Layouts|A list of all registered Control and Device layouts. This is the database of supported hardware, and information on how to represent a given piece of input hardware.|
|Actions|Only visible in Play mode, and only if at least one [Action](xref:input-system-actions) is enabled.<br><br>A list of all currently enabled Actions, and the Controls they are bound to.<br><br>See [Debugging Actions](#debugging-actions).|
|Users|Only visible when one or more `InputUser` instances exist. See documentation on [user management](xref:input-system-user-management).<br><br>A list of all currently active users, along with their active Control Schemes and Devices, all their associated Actions, and the Controls these Actions are bound to.<br><br>Note that `PlayerInput` uses `InputUser` to run. When using `PlayerInput` components, each player has an entry in this list.<br><br>See [Debugging users and PlayerInput](#debugging-users-and-playerinput).|
|Settings|The currently active Input System [settings](xref:input-system-settings).|
|Metrics|Statistics about Input System resource usage.|
### Debugging Devices
In the Input Debugger window, navigate to the __Devices__ list and double-click any [Input Device](xref:input-system-devices). This opens a window that displays information about the Device, including real-time state information for its Controls.
![The Unity Editor window displays the Analysis option selected from the Window menu, and the Input Debugger option selected from the Analysis submenu to demonstrate how to access the devices in the Input Debugger tab.](Images/DeviceInDebugger.png)
The top of the Device window displays general information about the specific Device, such as name, manufacturer, associated layout, device flags, device ID, and serial number. This section also displays the current __sample frequency__ and __processing delay__ of the device.
__Sample frequency__ indicates the frequency in Hertz (Hz) at which the Input System is currently processing samples or events. For devices that receive events, this reflects the current flow of events into the system. For devices that take periodic readings, this reflects the achievable sample rate of the system. The latter can be compared to the globally configured target sampling frequency, whereas the achievable event frequency is unrelated to the sample polling frequency setting.
__Processing delay__ indicates the average, minimum, and maximum latency from the creation of an input event or reading until the Input System has processed that same event. Note that this excludes any additional input delay caused by the OS, drivers, or device communication, and any additional output latency caused by further processing, rendering, GPU swap-chains, or display refresh rate.
The __Controls__ section lists the Device's Controls and their individual states. This is useful when debugging input issues, because you can verify whether the data that the Input System receives from the Input Device is what you expect it to be. There are two buttons at the top of this panel:
* __HID Descriptor__: Only displayed for devices that use the HID protocol to connect. This opens a window that displays the detailed [HID](xref:input-system-hid) specifications for the Device and each of its logical controls.
* __State__: Displays the current state of the Device in a new window. This is identical to the information displayed in this view, but doesn't update in real time, so you can take a snapshot of input state data and take the time to inspect it as needed.
The __Events__ section lists all [input events](xref:input-system-events) generated by the Device. You can double-click any event in the list to inspect the full Device state at the time the event occurred. To get a side-by-side difference between the state of the Device at different points in time, select multiple events, right-click them, and click __Compare__ from the context menu.
### Debugging Actions
The Input Debugger window lists all enabled [Actions](xref:input-system-actions) in the __Actions__ list. This list only appears if at least one Action is active and the Editor is in Play mode. If an Action has actively bound Controls, you can click the arrow next to the Action to see a list of the Controls. This is useful to debug whether your Bindings correctly map to the Controls you want them to bind to. See documentation on [Binding resolution](xref:input-system-action-bindings#binding-resolution) for more information about how Unity maps Bindings to Controls.
> [!NOTE]
> Actions that belong to [`InputUsers`](xref:input-system-user-management) don't appear here. They appear in the [__Users__](#debugging-users-and-playerinput) list instead.
### Debugging users and PlayerInput
When there are [`InputUser`](xref:input-system-user-management) instances (if you use `PlayerInput`, each `PlayerInput` instance implicitly creates one), the Input Debugger's __Users__ list displays each instance along with its paired Devices and active Actions. The listed Devices and Actions work the same way as those displayed in the [__Devices__](#debugging-devices) and [__Actions__](#debugging-actions) lists in the debugging window.
### Debugging layouts
The [__Layouts__](xref:input-system-layouts) list in the Input Debugger window displays a breakdown of all registered [Control and Device layouts](xref:input-system-layouts). This is the database of supported hardware and the knowledge of how to represent a given piece of input hardware. It's useful when you want to [create a new Device mapping](xref:input-system-hid#creating-a-custom-device-layout) and see how the Input System represents it.
### Debugging remotely
You can connect the Input Debugger to a Player that runs on a remote computer or device. This makes it possible to observe input activity from the Player in the Editor. This connection uses the `PlayerConnection` mechanism, which is the same one the Unity profiler uses to connect to a Player.
> [!NOTE]
> At the moment, debugging input in Players is restricted to seeing Devices and events from connected Players. There is no support for seeing other input-related data such as Actions and input users from Players.
To see remote Devices from built Players, open the Input Debugger window's __Remote Devices__ drop-down list. This list displays the remote Player instance you can connect to (if there are any). The same list appears in the Profiler and Console windows, and any connections are shared between those windows. If any Player(s) are connected, you can enable __Show remote devices__ in the same drop-down list. If Players are connected, and __Show remote devices__ is enabled, the [__Devices__](#debugging-devices) list in the Input Debugger window splits into a __Local__ section and a __Remote__ section. The __Remote__ section displays any Input Device from any connected Player, and lets you inspect Device state and events in real time, as if it were a local Device.
## Input visualizers
The Input System package comes with a __Visualizers__ sample that provides components for monitoring the state of various Input System elements in real time using on-screen visualizers.
To install the sample, navigate to the Input System package in the Package Manager window (see [Installation](xref:input-system-installation)), and next to the __Visualizers__ sample, click __Import in project__.
The sample provides two visualizer components:
### `InputControlVisualizer`
Visualizes the current state of a single Control in real time. You can have multiple Control visualizers to visualize the state of multiple Controls. Check the `GamepadVisualizer`, `MouseVisualizer`, or `PenVisualizer` Scenes in the sample for examples.
![LeftTrigger and Y appear in green in the InputControlVisualizer to show they are selected.](Images/InputControlVisualizer.png)
### `InputActionVisualizer`
Visualizes the current state of a single Action in real time. You can have multiple Action visualizers to visualize the state of multiple Actions. This can also display the current value of the Action and the Control currently driving the Action, and track the state of [Interactions](xref:input-system-interactions) over time. Check the `SimpleControlsVisualizer` Scene in the sample for examples.
![The Fire Action appears in green in the InputActionVisualizer to show it is selected.](Images/InputActionVisualizer.png)
## Device Simulator
When the Device Simulator window is in use, mouse and pen inputs on the simulated device screen are turned into touchscreen inputs. The Device Simulator uses its own touchscreen device, which it creates and destroys together with the Device Simulator window.
To prevent conflicts between simulated touchscreen inputs and native mouse and pen inputs, Device Simulator disables all native mouse and pen devices.
## Unity Remote
The Unity Remote is an app available for iOS and Android which allows using a mobile device for input while running in the Unity Editor. You can find details about the app and how to install it in the [Unity manual](https://docs.unity3d.com/Manual/UnityRemote5.html).
If you would like to try out the Unity Remote app, you can [install](xref:input-system-installation#install-samples) the "Unity Remote" sample that is provided with the Input System package.
> [!NOTE]
> Joysticks/gamepads are not yet supported over the Unity Remote. No joystick/gamepad input from the mobile device will come through in the editor.
> [!NOTE]
> This requires Unity 2021.2.18 or later.
When in play mode in the Editor and connected to the Unity Remote app, you will see a number of Devices have been added with the [`InputDevice.remote`](xref:UnityEngine.InputSystem.InputDevice.remote) flag set to true:
- [`Touchscreen`](xref:UnityEngine.InputSystem.Touchscreen)
- [`Accelerometer`](xref:UnityEngine.InputSystem.Accelerometer)
If a gyro is present on the mobile device:
- [`Gyroscope`](xref:UnityEngine.InputSystem.Gyroscope)
- [`AttitudeSensor`](xref:UnityEngine.InputSystem.AttitudeSensor)
- [`LinearAccelerationSensor`](xref:UnityEngine.InputSystem.LinearAccelerationSensor)
- [`GravitySensor`](xref:UnityEngine.InputSystem.GravitySensor)
These Devices can be used just like local Devices. They will receive input from the connected mobile device which in turn will receive the rendered output of the game running in the editor.
The [`Accelerometer`](xref:UnityEngine.InputSystem.Accelerometer) device will automatically be enabled and will not need you to call [`InputSystem.EnableDevice`](xref:UnityEngine.InputSystem.InputSystem.EnableDevice(UnityEngine.InputSystem.InputDevice)) explicitly. Setting the sampling frequency on the accelerometer from the Unity Remote using [`Sensor.samplingFrequency`](xref:UnityEngine.InputSystem.Sensor.samplingFrequency) has no effect.
The remaining sensors listed above will need to be explicitly enabled via [`InputSystem.EnableDevice`](xref:UnityEngine.InputSystem.InputSystem.EnableDevice(UnityEngine.InputSystem.InputDevice)) just like local sensors. Setting the sampling frequency on these sensors from the Unity Remote using [`Sensor.samplingFrequency`](xref:UnityEngine.InputSystem.Sensor.samplingFrequency) will be relayed to the device but note that setting the frequency on one of them will set it for all of them.
Touch coordinates from the device will be translated to the screen coordinates of the Game View inside the Editor.
## Other tips
To record events flowing through the system, use this code:
```C#
// You can also provide a device ID to only
// trace events for a specific device.
var trace = new InputEventTrace();
trace.Enable();
var current = new InputEventPtr();
while (trace.GetNextEvent(ref current))
{
    Debug.Log("Got some event: " + current);
}

// Also supports IEnumerable.
foreach (var eventPtr in trace)
    Debug.Log("Got some event: " + eventPtr);
// Trace consumes unmanaged resources. Make sure you dispose it correctly to avoid memory leaks.
trace.Dispose();
```
To see events as they're processed, use this code:
```C#
InputSystem.onEvent +=
    (eventPtr, device) =>
    {
        // Can handle events yourself, for example, and then stop them
        // from further processing by marking them as handled.
        eventPtr.handled = true;
    };
```
---
uid: input-system-devices
---
# Devices
Physically, Input Devices represent devices attached to the computer, which a user can use to control the app. Logically, Input Devices are the top-level container for [Controls](xref:input-system-controls). The [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) class is itself a specialization of [`InputControl`](xref:UnityEngine.InputSystem.InputControl). See [supported Devices](xref:input-system-supported-devices) to see what kind of Devices the Input System currently supports.
To query the set of all currently present Devices, you can use [`InputSystem.devices`](xref:UnityEngine.InputSystem.InputSystem.devices).
## Device descriptions
The Input System uses a device description, represented by [`InputDeviceDescription`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription), primarily during the Device discovery process. When a new Device is reported (by the runtime or by the user), the system attempts to find a Device [layout](xref:input-system-layouts) that matches the Device description contained in the report. This process is based on [Device matching](#matching).
After a Device has been created, you can retrieve the description it was created from through the [`InputDevice.description`](xref:UnityEngine.InputSystem.InputDevice.description) property.
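For example, a minimal sketch that dumps the description of every currently connected Device:
```CSharp
// Sketch: log the description each device was created from.
foreach (var device in InputSystem.devices)
    Debug.Log(device.name + ": " + device.description);
```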
Every description has a set of standard fields:
|Field|Description|
|-----|-----------|
|[`interfaceName`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.interfaceName)|Identifier for the interface/API that is making the Device available. In many cases, this corresponds to the name of the platform, but there are several more specific interfaces that are commonly used: [HID](https://www.usb.org/hid), [RawInput](https://docs.microsoft.com/en-us/windows/desktop/inputdev/raw-input), [XInput](https://docs.microsoft.com/en-us/windows/desktop/xinput/xinput-game-controller-apis-portal).<br>This field is required.|
|[`deviceClass`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.deviceClass)|A broad categorization of the Device. For example, "Gamepad" or "Keyboard".|
|[`product`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.product)|Name of the product as reported by the Device/driver itself.|
|[`manufacturer`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.manufacturer)|Name of the manufacturer as reported by the Device/driver itself.|
|[`version`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.version)|If available, provides the version of the driver or hardware for the Device.|
|[`serial`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.serial)|If available, provides the serial number for the Device.|
|[`capabilities`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.capabilities)|A string in JSON format that describes Device/interface-specific capabilities. See the [section on capabilities](#capabilities).|
### Capabilities
Aside from a number of standardized fields, such as `product` and `manufacturer`, a Device description can contain a [`capabilities`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription.capabilities) string in JSON format. This string describes characteristics which help the Input System to interpret the data from a Device, and map it to Control representations. Not all Device interfaces report Device capabilities. Examples of interface-specific Device capabilities are [HID descriptors](xref:input-system-hid). WebGL, Android, and Linux use similar mechanisms to report available Controls on connected gamepads.
### Matching
[`InputDeviceMatcher`](xref:UnityEngine.InputSystem.Layouts.InputDeviceMatcher) instances handle matching an [`InputDeviceDescription`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription) to a registered layout. Each matcher loosely functions as a kind of regular expression. Each field in the description can be independently matched with either a plain string or regular expression. Matching is not case-sensitive. For a matcher to apply, all of its individual expressions have to match.
To add a matcher to a layout, call [`InputSystem.RegisterLayoutMatcher`](xref:UnityEngine.InputSystem.InputSystem.RegisterLayoutMatcher(System.String,UnityEngine.InputSystem.Layouts.InputDeviceMatcher)). You can also supply matchers when you register a layout.
```CSharp
// Register a new layout and supply a matcher for it.
InputSystem.RegisterLayout<MyDevice>(
    matches: new InputDeviceMatcher()
        .WithInterface("HID")
        .WithProduct("MyDevice.*")
        .WithManufacturer("MyBrand"));

// Register an alternate matcher for an already registered layout.
InputSystem.RegisterLayoutMatcher<MyDevice>(
    new InputDeviceMatcher()
        .WithInterface("HID"));
```
If multiple matchers match the same [`InputDeviceDescription`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription), the Input System chooses the matcher with the largest number of properties to match against.
#### Hijacking the matching process
You can overrule the internal matching process from outside to select a different layout for a Device than the system would normally choose. This also makes it possible to quickly build new layouts. To do this, add a custom handler to the [`InputSystem.onFindControlLayoutForDevice`](xref:UnityEngine.InputSystem.InputSystem.onFindLayoutForDevice) event. If your handler returns a non-null layout string, then the Input System uses this layout.
### Device lifecycle
#### Device creation
Once the system has chosen a [layout](xref:input-system-layouts) for a device, it instantiates an [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) and populates it with [`InputControls`](xref:UnityEngine.InputSystem.InputControl) as the layout dictates. This process is internal and happens automatically.
> [!NOTE]
> You can't create valid [`InputDevices`](xref:UnityEngine.InputSystem.InputDevice) and [`InputControls`](xref:UnityEngine.InputSystem.InputControl) by manually instantiating them with `new`. To guide the creation process, you must use [layouts](xref:input-system-layouts).
After the Input System assembles the [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice), it calls [`FinishSetup`](xref:UnityEngine.InputSystem.InputControl.FinishSetup*) on each control of the device and on the device itself. Use this to finalize the setup of the Controls.
After an [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) is fully assembled, the Input System adds it to the system. As part of this process, the Input System calls [`MakeCurrent`](xref:UnityEngine.InputSystem.InputDevice.MakeCurrent*) on the Device, and signals [`InputDeviceChange.Added`](xref:UnityEngine.InputSystem.InputDeviceChange.Added) on [`InputSystem.onDeviceChange`](xref:UnityEngine.InputSystem.InputSystem.onDeviceChange). The Input System also calls [`InputDevice.OnAdded`](xref:UnityEngine.InputSystem.InputDevice.OnAdded*).
Once added, the [`InputDevice.added`](xref:UnityEngine.InputSystem.InputDevice.added) flag is set to true.
To add devices manually, you can call one of the `InputSystem.AddDevice` methods such as [`InputSystem.AddDevice(layout)`](xref:UnityEngine.InputSystem.InputSystem.AddDevice(System.String,System.String,System.String)).
```CSharp
// Add a gamepad. This bypasses the matching process and creates a device directly
// with the Gamepad layout.
InputSystem.AddDevice<Gamepad>();

// Add a device such that the matching process is employed:
InputSystem.AddDevice(new InputDeviceDescription
{
    interfaceName = "XInput",
    product = "Xbox Controller",
});
```
When a device is added, the Input System automatically issues a [sync request](xref:UnityEngine.InputSystem.LowLevel.RequestSyncCommand) on the device. This instructs the device to send an event representing its current state. Whether this request succeeds depends on whether the given device supports the sync command.
#### Device removal
When a Device is disconnected, it is removed from the system. A notification appears for [`InputDeviceChange.Removed`](xref:UnityEngine.InputSystem.InputDeviceChange) (sent via [`InputSystem.onDeviceChange`](xref:UnityEngine.InputSystem.InputSystem.onDeviceChange)) and the Device is removed from the [`devices`](xref:UnityEngine.InputSystem.InputSystem.devices) list. The system also calls [`InputDevice.OnRemoved`](xref:UnityEngine.InputSystem.InputDevice.OnRemoved*).
The [`InputDevice.added`](xref:UnityEngine.InputSystem.InputDevice.added) flag is reset to false in the process.
Note that Devices are not destroyed when removed. Device instances remain valid and you can still access them in code. However, trying to read values from the controls of these Devices leads to exceptions.
#### Device resets
Resetting a Device resets its Controls to their default state. You can do this manually using [`InputSystem.ResetDevice`](xref:UnityEngine.InputSystem.InputSystem.ResetDevice(UnityEngine.InputSystem.InputDevice,System.Boolean)):
```CSharp
InputSystem.ResetDevice(Gamepad.current);
```
There are two types of resets as determined by the second parameter to [`InputSystem.ResetDevice`](xref:UnityEngine.InputSystem.InputSystem.ResetDevice(UnityEngine.InputSystem.InputDevice,System.Boolean)):
|Reset Type|Description|
|----|-----------|
|**Soft** Resets|This is the default. With this type, only controls that are not marked as [`dontReset`](xref:input-system-layouts#control-items) are reset to their default value. This excludes controls such as [`Pointer.position`](xref:UnityEngine.InputSystem.Pointer.position) from resets and thus prevents mouse positions resetting to `(0,0)`.|
|**Hard** Resets|In this type, all controls are reset to their default value regardless of whether they have [`dontReset`](xref:input-system-layouts#control-items) set or not.|
Resetting Controls this way is visible on [Actions](xref:input-system-actions). If you reset a Device that is currently driving one or more Actions, those Actions are cancelled. This cancellation is different from sending an event with default state: whereas the latter may inadvertently [perform](xref:UnityEngine.InputSystem.InputAction.performed) Actions, a reset forces a clean cancellation (a button that was pressed, for example, does not appear to have been released).
Resets may be triggered automatically by the Input System depending on [application focus](#background-and-focus-change-behavior).
#### Device syncs
A Device may be requested to send an event with its current state through [`RequestSyncCommand`](xref:UnityEngine.InputSystem.LowLevel.RequestSyncCommand). It depends on the platform and type of Device whether this is supported or not.
A synchronization request can be explicitly sent using [`InputSystem.TrySyncDevice`](xref:UnityEngine.InputSystem.InputSystem.TrySyncDevice(UnityEngine.InputSystem.InputDevice)). If the device supports sync requests, the method returns true and an [`InputEvent`](xref:UnityEngine.InputSystem.LowLevel.InputEvent) will have been queued on the device for processing in the next [update](xref:UnityEngine.InputSystem.InputSystem.Update*).
Synchronization requests are also automatically sent by the Input System in certain situations. See [Background and focus change behavior](#background-and-focus-change-behavior) for more details.
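For example, a minimal sketch of an explicit sync request, assuming a gamepad is connected:
```CSharp
// Sketch: ask the current gamepad to send an event with its current state.
if (InputSystem.TrySyncDevice(Gamepad.current))
    Debug.Log("Sync supported; a state event was queued for the next update.");
else
    Debug.Log("This device does not support sync requests.");
```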
#### Device enabling and disabling
When a Device is added, the Input System sends it an initial [`QueryEnabledStateCommand`](xref:UnityEngine.InputSystem.LowLevel.QueryEnabledStateCommand) to find out whether the device is currently enabled or not. The result of this is reflected in the [`InputDevice.enabled`](xref:UnityEngine.InputSystem.InputDevice.enabled) property.
When disabled, no events other than removal ([`DeviceRemoveEvent`](xref:UnityEngine.InputSystem.LowLevel.DeviceRemoveEvent)) and configuration change ([`DeviceConfigurationEvent`](xref:UnityEngine.InputSystem.LowLevel.DeviceConfigurationEvent)) events are processed for a Device, even if they are sent.
A Device can be manually disabled and re-enabled via [`InputSystem.DisableDevice`](xref:UnityEngine.InputSystem.InputSystem.DisableDevice*) and [`InputSystem.EnableDevice`](xref:UnityEngine.InputSystem.InputSystem.EnableDevice*) respectively.
Note that [sensors](xref:input-system-sensors) start in a disabled state by default, and you need to enable them in order for them to generate events.
The Input System may automatically disable and re-enable Devices in certain situations, as detailed in the [next section](#background-and-focus-change-behavior).
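For example, a minimal sketch of enabling a sensor and disabling it again when it is no longer needed:
```CSharp
// Sketch: sensors start out disabled and must be enabled explicitly.
var accelerometer = Accelerometer.current;
if (accelerometer != null)
{
    InputSystem.EnableDevice(accelerometer);
    // ... read accelerometer.acceleration.ReadValue() while enabled ...
    InputSystem.DisableDevice(accelerometer);
}
```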
#### Background and focus change behavior
In general, input is tied to [application focus](https://docs.unity3d.com/ScriptReference/Application-isFocused.html). This means that Devices do not receive input while the application is not in the foreground and thus no [Actions](xref:input-system-actions) will receive input either. When the application comes back into focus, all devices will receive a [sync](#device-syncs) request to have them send their current state (which may have changed while the application was in the background) to the application. Devices that do not support sync requests will see a [soft reset](#device-resets) that resets all Controls not marked as [`dontReset`](xref:input-system-layouts#control-items) to their default state.
On platforms such as iOS and Android, that do not support running Unity applications in the background, this is the only supported behavior.
If the application is configured to run while in the background (that is, not having focus), input behavior can be selected from several options. This is supported in two scenarios:
* In Unity's [Player Settings](https://docs.unity3d.com/Manual/class-PlayerSettings.html) you can explicitly enable `Run In Background` for specific players that support it (such as Windows or Mac standalone players). Note that in these players this setting is always enabled automatically in *development* players.
* In the editor, application focus is tied to focus on the Game View. If no Game View is focused, the application is considered to be running in the background. However, while in play mode, the editor will *always* keep running the player loop regardless of focus on the Game View window. This means that in the editor, `Run In Background` is considered to always be enabled.
If the application is configured this way to keep running while in the background, the player loop and thus the Input System, too, will keep running even when the application does not have focus. What happens with respect to input then depends on two factors:
1. On the ability of individual devices to receive input while the application is not running in the foreground. This is only supported by a small subset of devices and platforms. VR devices ([`TrackedDevice`](xref:UnityEngine.InputSystem.TrackedDevice)) such as HMDs and VR controllers generally support this.
To find out whether a specific device supports this, you can query the [`InputDevice.canRunInBackground`](xref:UnityEngine.InputSystem.InputDevice.canRunInBackground) property. This property can also be forced to true or false via a Device's [layout](xref:input-system-layouts#control-items).
> [!NOTE]
> [`InputDevice.canRunInBackground`](xref:UnityEngine.InputSystem.InputDevice.canRunInBackground) is overridden by the editor in certain situations (see table below). In general, the value of the property does not have to be the same between the editor and the player and depends on the specific platform and device.
2. On two settings you can find in the project-wide [Input Settings](xref:input-system-settings): [`InputSettings.backgroundBehavior`](xref:UnityEngine.InputSystem.InputSettings.backgroundBehavior) and [`InputSettings.editorInputBehaviorInPlayMode`](xref:UnityEngine.InputSystem.InputSettings.editorInputBehaviorInPlayMode). The table below shows a detailed breakdown of how input behaviors vary based on these two settings and in relation to the `Run In Background` player setting in Unity.
The following table shows the full matrix of behaviors according to the [Input Settings](xref:input-system-settings) and whether the game is running in the editor or in the player.
![Focus Behavior](Images/FocusBehavior.png)
#### Domain reloads in the Editor
The Editor reloads the C# application domain whenever it reloads and recompiles scripts, or when the Editor goes into Play mode. This requires the Input System to reinitialize itself after each domain reload. During this process, the Input System attempts to recreate devices that were instantiated before the domain reload. However, the state of each Device doesn't carry across, which means that Devices reset to their default state on domain reloads.
Note that layout registrations do not persist across domain reloads. Instead, the Input System relies on all registrations to become available as part of the initialization process (for example, by using `[InitializeOnLoad]` to run registration as part of the domain startup code in the Editor). This allows you to change registrations and layouts in script, and the change to immediately take effect after a domain reload.
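For example, a minimal sketch of re-running a layout registration on every domain reload in the Editor (`MyDevice` is the same hypothetical custom device used in the earlier examples):
```CSharp
// Sketch: the static constructor runs on every domain reload in the Editor,
// so the layout registration is always re-applied.
#if UNITY_EDITOR
[UnityEditor.InitializeOnLoad]
#endif
public static class MyDeviceLayoutRegistration
{
    static MyDeviceLayoutRegistration()
    {
        InputSystem.RegisterLayout<MyDevice>();
    }
}
```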
## Native Devices
Devices that the [native backend](xref:input-system-architecture#native-backend) reports are considered native (as opposed to Devices created from script code). To identify these Devices, you can check the [`InputDevice.native`](xref:UnityEngine.InputSystem.InputDevice.native) property.
The Input System remembers native Devices. For example, if the system has no matching layout when the Device is first reported, but a layout which matches the device is registered later, the system uses this layout to recreate the Device.
You can force the Input System to use your own [layout](xref:input-system-layouts) when the native backend discovers a specific Device, by describing the Device in the layout, like this:
```
{
"name" : "MyGamepad",
"extend" : "Gamepad",
"device" : {
// All strings in here are regexs and case-insensitive.
"product" : "MyController",
"manufacturer" : "MyCompany"
}
}
```
> [!NOTE]
> You don't have to restart Unity in order for changes in your layout to take effect on native Devices. The Input System applies changes automatically on every domain reload, so you can just keep refining a layout and your Device is recreated with the most up-to-date version every time scripts are recompiled.
### Disconnected Devices
If you want to get notified when Input Devices disconnect, subscribe to the [`InputSystem.onDeviceChange`](xref:UnityEngine.InputSystem.InputSystem.onDeviceChange) event, and look for events of type [`InputDeviceChange.Disconnected`](xref:UnityEngine.InputSystem.InputDeviceChange).
The Input System keeps track of disconnected Devices in [`InputSystem.disconnectedDevices`](xref:UnityEngine.InputSystem.InputSystem.disconnectedDevices). If one of these Devices reconnects later, the Input System can detect that the Device was connected before, and reuses its [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) instance. This allows the [`PlayerInputManager`](xref:input-system-player-input-manager) to reassign the Device to the same [user](xref:input-system-user-management) again.
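For example, a minimal sketch of listening for disconnects and reconnects:
```CSharp
// Sketch: log when devices disconnect and reconnect.
InputSystem.onDeviceChange +=
    (device, change) =>
    {
        if (change == InputDeviceChange.Disconnected)
            Debug.Log("Disconnected: " + device);
        else if (change == InputDeviceChange.Reconnected)
            Debug.Log("Reconnected: " + device);
    };
```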
## Device IDs
Each Device that is created receives a unique numeric ID. You can access this ID through [`InputDevice.deviceId`](xref:UnityEngine.InputSystem.InputDevice.deviceId).
All IDs are only used once per Unity session.
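For example, you can store the ID of a Device and use it to look the Device up again later (a minimal sketch):
```CSharp
var gamepad = Gamepad.current;
if (gamepad != null)
{
    var id = gamepad.deviceId;

    // Look the Device back up by its ID. This returns null if the
    // Device has been removed in the meantime.
    var device = InputSystem.GetDeviceById(id);
    Debug.Log($"Device with ID {id}: {device}");
}
```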
## Device usages
Like any [`InputControl`](xref:UnityEngine.InputSystem.InputControl), a Device can have usages associated with it. You can query usages with the [`usages`](xref:UnityEngine.InputSystem.InputControl.usages) property, and use [`InputSystem.SetDeviceUsage()`](xref:UnityEngine.InputSystem.InputSystem.SetDeviceUsage(UnityEngine.InputSystem.InputDevice,System.String)) to set them. Usages can be arbitrary strings with arbitrary meanings. One common case where the Input System assigns usages to Devices is the handedness of XR controllers, which are tagged with the "LeftHand" or "RightHand" usages.
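For example, the following sketch tags the current gamepad with a custom usage (the usage string here is arbitrary) and reads the usages back:
```CSharp
var gamepad = Gamepad.current;
if (gamepad != null)
{
    // Assign an arbitrary usage string to the Device...
    InputSystem.SetDeviceUsage(gamepad, "Player1");

    // ...and query the usages currently assigned to it.
    Debug.Log(string.Join(",", gamepad.usages));
}
```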
## Device commands
While input [events](xref:input-system-events) deliver data from a Device, commands send data back to the Device. The Input System uses these to retrieve specific information from the Device, to trigger functions on the Device (such as rumble effects), and for a variety of other needs.
### Sending commands to Devices
The Input System sends commands to the Device through [`InputDevice.ExecuteCommand<TCommand>`](xref:UnityEngine.InputSystem.InputDevice.ExecuteCommand``1(``0@)). To monitor Device commands, use [`InputSystem.onDeviceCommand`](xref:UnityEngine.InputSystem.InputSystem.onDeviceCommand).
Each Device command implements the [`IInputDeviceCommandInfo`](xref:UnityEngine.InputSystem.LowLevel.IInputDeviceCommandInfo) interface, which only requires the [`typeStatic`](xref:UnityEngine.InputSystem.LowLevel.IInputDeviceCommandInfo.typeStatic) property to identify the type of the command. The native implementation of the Device should then understand how to handle that command. One common case is the `"HIDO"` command type which is used to send [HID output reports](xref:input-system-hid#hid-output) to HIDs.
### Adding custom device Commands
To create custom Device commands (for example, to support some functionality for a specific HID), create a `struct` that contains all the data to be sent to the Device, and add a [`typeStatic`](xref:UnityEngine.InputSystem.LowLevel.IInputDeviceCommandInfo.typeStatic) property to make that struct implement the [`IInputDeviceCommandInfo`](xref:UnityEngine.InputSystem.LowLevel.IInputDeviceCommandInfo) interface. To send data to a HID, this property should return `"HIDO"`.
You can then create an instance of this struct and populate all its fields, then use [`InputDevice.ExecuteCommand<TCommand>`](xref:UnityEngine.InputSystem.InputDevice.ExecuteCommand``1(``0@)) to send it to the Device. The data layout of the struct must match the native representation of the data as the device interprets it.
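As a rough sketch (the struct name, payload fields, and payload size here are hypothetical; the actual layout must match what the Device's native implementation expects), such a command could look like this:
```CSharp
// Requires System.Runtime.InteropServices, UnityEngine.InputSystem.LowLevel,
// and UnityEngine.InputSystem.Utilities.
[StructLayout(LayoutKind.Explicit, Size = kSize)]
public struct MyHIDOutputCommand : IInputDeviceCommandInfo
{
    public static FourCC Type => new FourCC('H', 'I', 'D', 'O');

    // Every command starts with the generic InputDeviceCommand header;
    // the (hypothetical) payload bytes follow it.
    internal const int kSize = InputDeviceCommand.BaseCommandSize + 2;

    [FieldOffset(0)] public InputDeviceCommand baseCommand;
    [FieldOffset(InputDeviceCommand.BaseCommandSize + 0)] public byte reportId;
    [FieldOffset(InputDeviceCommand.BaseCommandSize + 1)] public byte intensity;

    public FourCC typeStatic => Type;

    public static MyHIDOutputCommand Create(byte reportId, byte intensity)
    {
        return new MyHIDOutputCommand
        {
            baseCommand = new InputDeviceCommand(Type, kSize),
            reportId = reportId,
            intensity = intensity
        };
    }
}
```
You could then populate an instance via `MyHIDOutputCommand.Create()` and pass it by reference to [`InputDevice.ExecuteCommand<TCommand>`](xref:UnityEngine.InputSystem.InputDevice.ExecuteCommand``1(``0@)).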
## Device state
Like any other type of [Control](xref:input-system-controls#control-state), each Device has a block of memory allocated to it which stores the state of all the Controls associated with the Device.
### State changes
State changes are usually initiated through [state events](xref:input-system-events#state-events) from the native backend, but you can use [`InputControl<>.WriteValueIntoState()`](xref:UnityEngine.InputSystem.InputControl`1.WriteValueIntoState(`0,System.Void*)) to manually overwrite the state of any Control.
#### Monitoring state changes
You can use [`InputState.AddChangeMonitor()`](xref:UnityEngine.InputSystem.LowLevel.InputState.AddChangeMonitor(UnityEngine.InputSystem.InputControl,System.Action{UnityEngine.InputSystem.InputControl,System.Double,UnityEngine.InputSystem.LowLevel.InputEventPtr,System.Int64},System.Int32,System.Action{UnityEngine.InputSystem.InputControl,System.Double,System.Int64,System.Int32})) to register a callback to be called whenever the state of a Control changes. The Input System uses the same mechanism to implement [input Actions](xref:input-system-actions).
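For example, a minimal sketch that logs every state change of the current gamepad's left trigger:
```CSharp
InputState.AddChangeMonitor(Gamepad.current.leftTrigger,
    (control, time, eventPtr, monitorIndex) =>
    {
        Debug.Log($"{control.name} changed at time {time}");
    });
```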
#### Synthesizing state
The Input System can synthesize a new state from an existing state. An example of such a synthesized state is the [`press`](xref:UnityEngine.InputSystem.Pointer.press) button Control that [`Touchscreen`](xref:UnityEngine.InputSystem.Touchscreen) inherits from [`Pointer`](xref:UnityEngine.InputSystem.Pointer). Unlike a mouse, which has a physical button, for [`Touchscreen`](xref:UnityEngine.InputSystem.Touchscreen) this is a [synthetic Control](xref:input-system-controls#synthetic-controls) that doesn't correspond to actual data coming in from the Device backend. Instead, the Input System considers the button to be pressed if any touch is currently ongoing, and released otherwise.
To do this, the Input System uses [`InputState.Change`](xref:UnityEngine.InputSystem.LowLevel.InputState.Change``1(UnityEngine.InputSystem.InputControl,``0,UnityEngine.InputSystem.LowLevel.InputUpdateType,UnityEngine.InputSystem.LowLevel.InputEventPtr)), which allows feeding arbitrary state changes into the system without having to run them through the input event queue. The Input System incorporates state changes directly and synchronously. State change [monitors](#monitoring-state-changes) still trigger as expected.
## Working with Devices
### Monitoring Devices
To be notified when new Devices are added or existing Devices are removed, use [`InputSystem.onDeviceChange`](xref:UnityEngine.InputSystem.InputSystem.onDeviceChange).
```CSharp
InputSystem.onDeviceChange +=
(device, change) =>
{
switch (change)
{
case InputDeviceChange.Added:
// New Device.
break;
case InputDeviceChange.Disconnected:
// Device got unplugged.
break;
case InputDeviceChange.Connected:
// Plugged back in.
break;
case InputDeviceChange.Removed:
// Remove from Input System entirely; by default, Devices stay in the system once discovered.
break;
default:
// See InputDeviceChange reference for other event types.
break;
}
    };
```
[`InputSystem.onDeviceChange`](xref:UnityEngine.InputSystem.InputSystem.onDeviceChange) delivers notifications for other device-related changes as well. See the [`InputDeviceChange` enum](xref:UnityEngine.InputSystem.InputDeviceChange) for more information.
### Adding and removing Devices
To manually add and remove Devices through the API, use [`InputSystem.AddDevice()`](xref:UnityEngine.InputSystem.InputSystem.AddDevice(UnityEngine.InputSystem.InputDevice)) and [`InputSystem.RemoveDevice()`](xref:UnityEngine.InputSystem.InputSystem.RemoveDevice(UnityEngine.InputSystem.InputDevice)).
This allows you to create your own Devices, which can be useful for testing purposes, or for creating virtual Input Devices which synthesize input from other events. As an example, see the [on-screen Controls](xref:input-system-on-screen) that the Input System provides. The Input Devices used for on-screen Controls are created entirely in code and have no [native representation](#native-devices).
### Creating custom Devices
> [!NOTE]
> This example deals only with Devices that have fixed layouts (that is, you know the specific model or models that you want to implement). This is different from an interface such as HID, where Devices can describe themselves through the interface and take on a wide variety of forms. A fixed Device layout can't cover self-describing Devices, so you need to use a [layout builder](xref:input-system-layouts#generated-layouts) to build Device layouts from information you obtain at runtime.
There are two main situations in which you might need to create a custom Device:
1. You have an existing API that generates input, and which you want to reflect into the Input System.
2. You have an HID that the Input System ignores, or for which the Input System auto-generates a layout that doesn't work well enough for your needs.
For the second scenario, see [Overriding the HID Fallback](xref:input-system-hid#creating-a-custom-device-layout).
The steps below deal with the first scenario, where you want to create a new Input Device entirely from scratch and provide input to it from a third-party API.
#### Step 1: The state struct
The first step is to create a C# `struct` that represents the form in which the system receives and stores input, and also describes the `InputControl` instances that the Input System must create for the Device in order to retrieve its state.
```CSharp
// A "state struct" describes the memory format that a Device uses. Each Device can
// receive and store memory in its custom format. InputControls then connect to
// the individual pieces of memory and read out values from them.
//
// If it's important for the memory format to match 1:1 at the binary level
// to an external representation, it's generally advisable to use
// LayoutKind.Explicit.
[StructLayout(LayoutKind.Explicit, Size = 32)]
public struct MyDeviceState : IInputStateTypeInfo
{
// You must tag every state with a FourCC code for type
// checking. The characters can be anything. Choose something that allows
// you to easily recognize memory that belongs to your own Device.
public FourCC format => new FourCC('M', 'Y', 'D', 'V');
// InputControlAttributes on fields tell the Input System to create Controls
// for the public fields found in the struct.
    // Assume a 16-bit field of buttons. Create one button that is tied to
// bit #3 (zero-based). Note that buttons don't need to be stored as bits.
// They can also be stored as floats or shorts, for example. The
// InputControlAttribute.format property determines which format the
// data is stored in. If omitted, the system generally infers it from the value
// type of the field.
[InputControl(name = "button", layout = "Button", bit = 3)]
public ushort buttons;
// Create a floating-point axis. If a name is not supplied, it is taken
// from the field.
[InputControl(layout = "Axis")]
public short axis;
}
```
The Input System's layout mechanism uses [`InputControlAttribute`](xref:UnityEngine.InputSystem.Layouts.InputControlAttribute) annotations to add Controls to the layout of your Device. For details, see the [layout system](xref:input-system-layouts) documentation.
With the state struct in place, you now have a way to send input data to the Input System and store it there. The next thing you need is an [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) that uses your custom state struct and represents your custom Device.
#### Step 2: The Device class
Next, you need a class derived from one of the [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) base classes. You can either base your Device directly on [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice), or you can pick a more specific Device type, like [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad).
This example assumes that your Device doesn't fit into any of the existing Device classes, so it derives directly from [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice).
```CSharp
// InputControlLayoutAttribute attribute is only necessary if you want
// to override the default behavior that occurs when you register your Device
// as a layout.
// The most common use of InputControlLayoutAttribute is to direct the system
// to a custom "state struct" through the `stateType` property. See below for details.
[InputControlLayout(displayName = "My Device", stateType = typeof(MyDeviceState))]
public class MyDevice : InputDevice
{
// In the state struct, you added two Controls that you now want to
// surface on the Device, for convenience. The Controls
// get added to the Device either way. When you expose them as properties,
// it is easier to get to the Controls in code.
public ButtonControl button { get; private set; }
public AxisControl axis { get; private set; }
// The Input System calls this method after it constructs the Device,
// but before it adds the device to the system. Do any last-minute setup
// here.
protected override void FinishSetup()
{
base.FinishSetup();
// NOTE: The Input System creates the Controls automatically.
        // This is why we don't use `new` here but rather just look
// the Controls up.
button = GetChildControl<ButtonControl>("button");
axis = GetChildControl<AxisControl>("axis");
}
}
```
#### Step 3: The Update method
You now have a Device in place along with its associated state format. You can call the following method to create a fully set-up Device with your two Controls on it:
```CSharp
InputSystem.AddDevice<MyDevice>();
```
However, this Device doesn't receive input yet, because you haven't added any code that generates input. To do that, you can use [`InputSystem.QueueStateEvent`](xref:UnityEngine.InputSystem.InputSystem.QueueStateEvent``1(UnityEngine.InputSystem.InputDevice,``0,System.Double)) or [`InputSystem.QueueDeltaStateEvent`](xref:UnityEngine.InputSystem.InputSystem.QueueDeltaStateEvent``1(UnityEngine.InputSystem.InputControl,``0,System.Double)) from anywhere, including from a thread. The following example uses [`IInputUpdateCallbackReceiver`](xref:UnityEngine.InputSystem.LowLevel.IInputUpdateCallbackReceiver), which, when implemented by any [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice), adds an [`OnUpdate()`](xref:UnityEngine.InputSystem.LowLevel.IInputUpdateCallbackReceiver.OnUpdate) method that automatically gets called during [`InputSystem.onBeforeUpdate`](xref:UnityEngine.InputSystem.InputSystem.onBeforeUpdate) and provides input events to the current input update.
> [!NOTE]
> If you already have a place where input for your device becomes available, you can skip this step and queue input events from there instead of using [`IInputUpdateCallbackReceiver`](xref:UnityEngine.InputSystem.LowLevel.IInputUpdateCallbackReceiver).
```CSharp
public class MyDevice : InputDevice, IInputUpdateCallbackReceiver
{
//...
public void OnUpdate()
{
// In practice, this would read out data from an external
// API. This example uses some empty input.
var state = new MyDeviceState();
InputSystem.QueueStateEvent(this, state);
}
}
```
#### Step 4: Device registration and creation
You now have a functioning device, but you haven't registered it (added it to the system) yet. This means you can't see the device when, for example, you create bindings in the [Action editor](xref:input-system-action-assets#editing-input-action-assets).
You can register your device type with the system from within the code that runs automatically as part of Unity's startup. To do so, modify the definition of `MyDevice` like so:
```CSharp
// Add the InitializeOnLoad attribute to automatically run the static
// constructor of the class after each C# domain load.
#if UNITY_EDITOR
[InitializeOnLoad]
#endif
public class MyDevice : InputDevice, IInputUpdateCallbackReceiver
{
//...
static MyDevice()
{
// RegisterLayout() adds a "Control layout" to the system.
// These can be layouts for individual Controls (like sticks)
// or layouts for entire Devices (which are themselves
// Controls) like in our case.
InputSystem.RegisterLayout<MyDevice>();
}
// You still need a way to trigger execution of the static constructor
// in the Player. To do this, you can add the RuntimeInitializeOnLoadMethod
    // attribute to an empty method.
[RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.BeforeSceneLoad)]
private static void InitializeInPlayer() {}
}
```
This registers the Device type with the system and makes it available in the Control picker. However, you still need a way to add an instance of the Device when it is connected.
In theory, you could call [`InputSystem.AddDevice<MyDevice>()`](xref:UnityEngine.InputSystem.InputSystem.AddDevice``1(System.String)) somewhere, but in a real-world setup you likely have to correlate the Input Devices you create with their identities in the third-party API.
It might be tempting to do something like this:
```CSharp
public class MyDevice : InputDevice, IInputUpdateCallbackReceiver
{
//...
// This does NOT work correctly.
public ThirdPartyAPI.DeviceId externalId { get; set; }
}
```
and then set that on the Device after calling [`AddDevice<MyDevice>`](xref:UnityEngine.InputSystem.InputSystem.AddDevice``1(System.String)). However, this doesn't work as expected in the Editor, because the Input System requires Devices to be created solely from their [`InputDeviceDescription`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription) in combination with the chosen layout (and layout variant). In addition, the system supports a fixed set of mutable per-device properties such as device usages (that is, [`InputSystem.SetDeviceUsage()`](xref:UnityEngine.InputSystem.InputSystem.SetDeviceUsage(UnityEngine.InputSystem.InputDevice,System.String)) and related methods). This allows the system to easily recreate Devices after domain reloads in the Editor, as well as to create replicas of remote Devices when connecting to a Player. To comply with this requirement, you must convert the information provided by the third-party API into an [`InputDeviceDescription`](xref:UnityEngine.InputSystem.Layouts.InputDeviceDescription) and then use an [`InputDeviceMatcher`](xref:UnityEngine.InputSystem.Layouts.InputDeviceMatcher) to match the description to your custom `MyDevice` layout.
This example assumes that the third-party API has two callbacks, like this:
```CSharp
public static class ThirdPartyAPI
{
// This example assumes that the argument is a string that
// contains the name of the Device, and that no two Devices
// have the same name in the external API.
public static Action<string> deviceAdded;
public static Action<string> deviceRemoved;
}
```
You can hook into those callbacks and create and destroy devices in response.
```CSharp
// This example uses a MonoBehaviour with [ExecuteInEditMode]
// on it to run the setup code. You can do this many other ways.
[ExecuteInEditMode]
public class MyDeviceSupport : MonoBehaviour
{
protected void OnEnable()
{
ThirdPartyAPI.deviceAdded += OnDeviceAdded;
ThirdPartyAPI.deviceRemoved += OnDeviceRemoved;
}
protected void OnDisable()
{
ThirdPartyAPI.deviceAdded -= OnDeviceAdded;
ThirdPartyAPI.deviceRemoved -= OnDeviceRemoved;
}
private void OnDeviceAdded(string name)
{
// Feed a description of the Device into the system. In response, the
// system matches it to the layouts it has and creates a Device.
InputSystem.AddDevice(
new InputDeviceDescription
{
interfaceName = "ThirdPartyAPI",
product = name
});
}
private void OnDeviceRemoved(string name)
{
var device = InputSystem.devices.FirstOrDefault(
x => x.description == new InputDeviceDescription
{
interfaceName = "ThirdPartyAPI",
product = name,
});
if (device != null)
InputSystem.RemoveDevice(device);
}
// Move the registration of MyDevice from the
// static constructor to here, and change the
// registration to also supply a matcher.
protected void Awake()
{
// Add a match that catches any Input Device that reports its
// interface as "ThirdPartyAPI".
InputSystem.RegisterLayout<MyDevice>(
matches: new InputDeviceMatcher()
.WithInterface("ThirdPartyAPI"));
}
}
```
#### Step 5: `current` and `all` (optional)
For convenience, you can quickly access the last used device of a given type, or list all devices of a specific type. To do this, add support for a `current` and for an `all` getter to the API of `MyDevice`.
```CSharp
public class MyDevice : InputDevice, IInputUpdateCallbackReceiver
{
//...
public static MyDevice current { get; private set; }
public static IReadOnlyList<MyDevice> all => s_AllMyDevices;
private static List<MyDevice> s_AllMyDevices = new List<MyDevice>();
public override void MakeCurrent()
{
base.MakeCurrent();
current = this;
}
protected override void OnAdded()
{
base.OnAdded();
s_AllMyDevices.Add(this);
}
protected override void OnRemoved()
{
base.OnRemoved();
s_AllMyDevices.Remove(this);
}
}
```
#### Step 6: Device Commands (Optional)
A final, but optional, step is to add support for Device commands. A "device command" is the opposite of input: it consists of data traveling __to__ the input device, which might also return data as part of the operation (much like a function call). You can use this to communicate with the backend of the device in order to query configuration, or to initiate effects such as haptics. At the moment there isn't a dedicated interface available for this; however, some scenarios can still be solved with the current interfaces.
For example, the following shows how a non-hardware-backed (simulated) Device can report that it is able to run in the background and supports sync commands. This is useful to prevent the Device from cancelling Actions when application focus is lost and restored. For more information, see [Device syncs](#device-syncs).
```CSharp
public class MyDevice : InputDevice, IInputUpdateCallbackReceiver
{
//...
protected override unsafe long ExecuteCommand(InputDeviceCommand* commandPtr)
{
var type = commandPtr->type;
if (type == RequestSyncCommand.Type)
{
// Report that the device supports the sync command and has handled it.
// This will prevent device reset during focus changes.
            return InputDeviceCommand.GenericSuccess;
}
if (type == QueryCanRunInBackground.Type)
{
// Notify that the device supports running in the background.
((QueryCanRunInBackground*)commandPtr)->canRunInBackground = true;
            return InputDeviceCommand.GenericSuccess;
}
        // Fall back to the base implementation for any other command.
        return base.ExecuteCommand(commandPtr);
}
}
```
View File
@@ -0,0 +1,9 @@
{
"folders": [
{
"path": "."
}
],
"settings": {
}
}
View File
@@ -0,0 +1,18 @@
---
uid: input-system-editor-features
---
# Input System Editor Features
This section describes how the Input System integrates with the Unity Editor, which allows you to read input in edit mode, debug input values, and set up automated input tests.
### [Using Input in the Editor](xref:input-system-use-in-editor)
Unlike Unity's old Input Manager, the Input System package allows you to read input from within [Editor window code](https://docs.unity3d.com/Manual/editor-EditorWindows.html) as well. ([Read more](xref:input-system-use-in-editor))
### [The Input Debugger](xref:input-system-debugging)
When something isn't working as expected, the quickest way to troubleshoot what's wrong is the Input Debugger in the Unity Editor. The Input Debugger provides access to the activity of the Input System in both the Editor and the connected Players. ([Read more](xref:input-system-debugging))
### [Automated Input Testing](xref:input-system-testing)
The Input System has built-in support for writing automated input tests. You can drive input entirely from code, without any dependencies on platform backends and physical hardware devices. The automated input tests you write consider the generated input to be the same as input generated at runtime by actual platform code. ([Read more](xref:input-system-testing))
View File
@@ -0,0 +1,321 @@
---
uid: input-system-events
---
# Input events
The Input System is event-driven. All input is delivered as events, and you can generate custom input by injecting events. You can also observe all source input by listening in on the events flowing through the system.
> [!NOTE]
> Events are an advanced, mostly internal feature of the Input System. Knowledge of the event system is mostly useful if you want to support custom Devices, or change the behavior of existing Devices.
Input events are a low-level mechanism. Usually, you don't need to deal with events if all you want to do is receive input for your app. Events are stored in unmanaged memory buffers and not converted to C# heap objects. The Input System provides wrapper APIs, but unsafe code is required for more involved event manipulations.
Note that there is no routing mechanism. The runtime delivers events straight to the Input System, which then incorporates them directly into the Device state.
Input events are represented by the [`InputEvent`](xref:UnityEngine.InputSystem.LowLevel.InputEvent) struct. Each event has a set of common properties:
|Property|Description|
|--------|-----------|
|[`type`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.type)|[`FourCC`](xref:UnityEngine.InputSystem.Utilities.FourCC) code that indicates what type of event it is.|
|[`eventId`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.eventId)|Unique numeric ID of the event.|
|[`time`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.time)|Timestamp of when the event was generated. This is on the same timeline as [`Time.realtimeSinceStartup`](https://docs.unity3d.com/ScriptReference/Time-realtimeSinceStartup.html).|
|[`deviceId`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.deviceId)|ID of the Device that the event targets.|
|[`sizeInBytes`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.sizeInBytes)|Total size of the event in bytes.|
You can observe the events received for a specific input device in the [input debugger](xref:input-system-debugging#debugging-devices).
## Types of events
### State events
A state event contains the input state for a Device. The Input System uses these events to feed new input to Devices.
There are two types of state events:
* [`StateEvent`](xref:UnityEngine.InputSystem.LowLevel.StateEvent) (`'STAT'`)
* [`DeltaStateEvent`](xref:UnityEngine.InputSystem.LowLevel.DeltaStateEvent) (`'DLTA'`)
[`StateEvent`](xref:UnityEngine.InputSystem.LowLevel.StateEvent) contains a full snapshot of the entire state of a Device in the format specific to that Device. The [`stateFormat`](xref:UnityEngine.InputSystem.LowLevel.StateEvent.stateFormat) field identifies the type of the data in the event. You can access the raw data using the [`state`](xref:UnityEngine.InputSystem.LowLevel.StateEvent.state) pointer and [`stateSizeInBytes`](xref:UnityEngine.InputSystem.LowLevel.StateEvent.stateSizeInBytes).
A [`DeltaStateEvent`](xref:UnityEngine.InputSystem.LowLevel.DeltaStateEvent) is like a [`StateEvent`](xref:UnityEngine.InputSystem.LowLevel.StateEvent), but only contains a partial snapshot of the state of a Device. The Input System usually sends this for Devices that require a large state record, to reduce the amount of memory it needs to update if only some of the Controls change their state. To access the raw data, you can use the [`deltaState`](xref:UnityEngine.InputSystem.LowLevel.DeltaStateEvent.deltaState) pointer and [`deltaStateSizeInBytes`](xref:UnityEngine.InputSystem.LowLevel.DeltaStateEvent.deltaStateSizeInBytes). The Input System should apply the data to the Device's state at the offset defined by [`stateOffset`](xref:UnityEngine.InputSystem.LowLevel.DeltaStateEvent.stateOffset).
### Device events
Device events indicate a change that is relevant to a Device as a whole. If you're interested in these events, it is usually more convenient to subscribe to the higher-level [`InputSystem.onDeviceChange`](xref:UnityEngine.InputSystem.InputSystem.onDeviceChange) event rather than processing [`InputEvents`](xref:UnityEngine.InputSystem.LowLevel.InputEvent) yourself.
There are three types of Device events:
* [`DeviceRemoveEvent`](xref:UnityEngine.InputSystem.LowLevel.DeviceRemoveEvent) (`'DREM'`)
* [`DeviceConfigurationEvent`](xref:UnityEngine.InputSystem.LowLevel.DeviceConfigurationEvent) (`'DCFG'`)
* [`DeviceResetEvent`](xref:UnityEngine.InputSystem.LowLevel.DeviceResetEvent) (`'DRST'`)
`DeviceRemoveEvent` indicates that a Device has been removed or disconnected. To query the device that has been removed, you can use the common [`deviceId`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.deviceId) field. This event doesn't have any additional data.
`DeviceConfigurationEvent` indicates that the configuration of a Device has changed. The meaning of this is Device-specific. This might signal, for example, that the layout used by the keyboard has changed or that, on a console, a gamepad has changed which player ID(s) it is assigned to. You can query the changed device from the common [`deviceId`](xref:UnityEngine.InputSystem.LowLevel.InputEvent.deviceId) field. This event doesn't have any additional data.
`DeviceResetEvent` indicates that a Device should be reset. This causes [`InputSystem.ResetDevice`](xref:UnityEngine.InputSystem.InputSystem.ResetDevice(UnityEngine.InputSystem.InputDevice,System.Boolean)) to be called on the Device.
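You can also reset a Device from script yourself (a minimal sketch):
```CSharp
// Reset the given Device back to its default state.
InputSystem.ResetDevice(Gamepad.current);
```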
### Text events
[Keyboard](xref:input-system-keyboard) devices send these events to handle text input. If you're interested in these events, it's usually more convenient to subscribe to the higher-level [callbacks on the Keyboard class](xref:input-system-keyboard#text-input) rather than processing [`InputEvents`](xref:UnityEngine.InputSystem.LowLevel.InputEvent) yourself.
There are two types of text events:
* [`TextEvent`](xref:UnityEngine.InputSystem.LowLevel.TextEvent) (`'TEXT'`)
* [`IMECompositionEvent`](xref:UnityEngine.InputSystem.LowLevel.IMECompositionEvent) (`'IMES'`)
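For example, instead of parsing [`TextEvent`](xref:UnityEngine.InputSystem.LowLevel.TextEvent)s yourself, you can usually rely on the higher-level callback (a minimal sketch):
```CSharp
// Log every character of text input received from the keyboard.
Keyboard.current.onTextInput +=
    character => Debug.Log($"Text input: '{character}'");
```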
## Working with events
### Listening to events
If you want to do any monitoring or processing on incoming events yourself, subscribe to the [`InputSystem.onEvent`](xref:UnityEngine.InputSystem.InputSystem.onEvent) callback.
```CSharp
InputSystem.onEvent +=
(eventPtr, device) =>
{
Debug.Log($"Received event for {device}");
};
```
An [`IObservable`](https://docs.microsoft.com/en-us/dotnet/api/system.iobservable-1) interface is provided to more conveniently process events.
```CSharp
// Wait for first button press on a gamepad.
InputSystem.onEvent
.ForDevice<Gamepad>()
.Where(e => e.HasButtonPress())
.CallOnce(ctrl => Debug.Log($"Button {ctrl} pressed"));
```
To enumerate the controls that have value changes in an event, you can use [`InputControlExtensions.EnumerateChangedControls`](xref:UnityEngine.InputSystem.InputControlExtensions.EnumerateChangedControls(UnityEngine.InputSystem.LowLevel.InputEventPtr,UnityEngine.InputSystem.InputDevice,System.Single)).
```CSharp
InputSystem.onEvent
.Call(eventPtr =>
{
foreach (var control in eventPtr.EnumerateChangedControls())
Debug.Log($"Control {control} changed value to {control.ReadValueFromEventAsObject(eventPtr)}");
});
```
This is significantly more efficient than manually iterating over [`InputDevice.allControls`](xref:UnityEngine.InputSystem.InputDevice.allControls) and reading out the value of each control from the event.
### Reading state events
State events contain raw memory snapshots for Devices. As such, interpreting the data in the event requires knowledge about where and how individual state is stored for a given Device.
The easiest way to access state contained in a state event is to rely on the Device that the state is meant for. You can ask any Control to read its value from a given event rather than from its own internally stored state.
For example, the following code demonstrates how to read a value for [`Gamepad.leftStick`](xref:UnityEngine.InputSystem.Gamepad.leftStick) from a state event targeted at a [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad).
```CSharp
InputSystem.onEvent +=
(eventPtr, device) =>
{
// Ignore anything that isn't a state event.
if (!eventPtr.IsA<StateEvent>() && !eventPtr.IsA<DeltaStateEvent>())
return;
var gamepad = device as Gamepad;
if (gamepad == null)
{
// Event isn't for a gamepad or device ID is no longer valid.
return;
}
var leftStickValue = gamepad.leftStick.ReadValueFromEvent(eventPtr);
};
```
### Creating events
Anyone can create and queue new input events against any existing Device. Queueing an input event is thread-safe, which means that event generation can happen in background threads.
> [!NOTE]
> Unity allocates limited memory to events that come from background threads. If background threads produce too many events, queueing an event from a thread blocks the thread until the main thread flushes out the background event queue.
Note that queuing an event doesn't immediately consume the event. Event processing happens on the next update (depending on [`InputSettings.updateMode`](xref:input-system-settings#update-mode), it is triggered either manually via [`InputSystem.Update`](xref:UnityEngine.InputSystem.InputSystem.Update), or automatically as part of the Player loop).
#### Sending state events
For Devices that have a corresponding "state struct" describing the state of the device, the easiest way of sending input to the Device is to simply queue instances of those structs:
```CSharp
// Mouse.
InputSystem.QueueStateEvent(Mouse.current, new MouseState { position = new Vector2(123, 234) });
// Keyboard.
InputSystem.QueueStateEvent(Keyboard.current, new KeyboardState(Key.LeftCtrl, Key.A));
```
`Touchscreen` is somewhat special in that it expects its input to be in [`TouchState`](xref:UnityEngine.InputSystem.LowLevel.TouchState) format.
```CSharp
// Start touch.
InputSystem.QueueStateEvent(Touchscreen.current,
new TouchState { touchId = 1, phase = TouchPhase.Began, position = new Vector2(123, 234) });
// Move touch.
InputSystem.QueueStateEvent(Touchscreen.current,
new TouchState { touchId = 1, phase = TouchPhase.Moved, position = new Vector2(234, 345) });
// End touch.
InputSystem.QueueStateEvent(Touchscreen.current,
new TouchState { touchId = 1, phase = TouchPhase.Ended, position = new Vector2(123, 234) });
```
> [!IMPORTANT]
> [Touch IDs](xref:UnityEngine.InputSystem.Controls.TouchControl.touchId) cannot be 0! A valid touch must have a non-zero touch ID. Concurrent touches must each have a unique ID. After a touch has ended, its ID can be reused &ndash; although it is recommended to not do so.
If the exact format of the state used by a given Device is not known, the easiest way to send input to it is to simply create a [`StateEvent`](xref:UnityEngine.InputSystem.LowLevel.StateEvent) from the Device itself:
```CSharp
// `StateEvent.From` creates a temporary buffer in unmanaged memory that holds
// a state event large enough for the given device and contains a memory
// copy of the device's current state.
InputEventPtr eventPtr;
using (StateEvent.From(myDevice, out eventPtr))
{
((AxisControl) myDevice["myControl"]).WriteValueIntoEvent(0.5f, eventPtr);
InputSystem.QueueEvent(eventPtr);
}
```
Alternatively, you can send events for individual Controls.
```CSharp
// Send event to update leftStick on the gamepad.
InputSystem.QueueDeltaStateEvent(Gamepad.current.leftStick,
new Vector2(0.123f, 0.234f);
```
Note that delta state events only work for Controls that are both byte-aligned and a multiple of 8 bits in size in memory. You can't send a delta state event for a button Control that is stored as a single bit, for example.
### Capturing Events
> [!NOTE]
> To download a sample project which contains a reusable MonoBehaviour called `InputRecorder`, which can capture and replay input from arbitrary devices, open the Package Manager, select the Input System Package, and choose the sample project "Input Recorder" to download.
You can use the [`InputEventTrace`](xref:UnityEngine.InputSystem.LowLevel.InputEventTrace) class to record input events for later processing:
```CSharp
var trace = new InputEventTrace(); // Can also give device ID to only
// trace events for a specific device.
trace.Enable();
//... run stuff
var current = new InputEventPtr();
while (trace.GetNextEvent(ref current))
{
Debug.Log("Got some event: " + current);
}
// Also supports IEnumerable.
foreach (var eventPtr in trace)
Debug.Log("Got some event: " + eventPtr);
// Trace consumes unmanaged resources. Make sure to dispose.
trace.Dispose();
```
Dispose event traces after use, so that they do not leak memory on the unmanaged (C++) memory heap.
You can also write event traces out to files/streams, load them back in, and replay recorded streams.
```CSharp
// Set up a trace such that it automatically grows in size as needed.
var trace = new InputEventTrace(growBuffer: true);
trace.Enable();
// ... capture some input ...
// Write trace to file.
trace.WriteTo("mytrace.inputtrace");
// Load trace from same file.
var loadedTrace = InputEventTrace.LoadFrom("mytrace.inputtrace");
```
You can replay captured traces directly from [`InputEventTrace`](xref:UnityEngine.InputSystem.LowLevel.InputEventTrace) instances using the [`Replay`](xref:UnityEngine.InputSystem.LowLevel.InputEventTrace.Replay*) method.
```CSharp
// The Replay method returns a ReplayController that can be used to
// configure and control playback.
var controller = trace.Replay();
// For example, to not replay the events as is but rather create new devices and send
// the events to them, call WithAllDevicesMappedToNewInstances.
controller.WithAllDevicesMappedToNewInstances();
// Replay all frames one by one.
controller.PlayAllFramesOneByOne();
// Replay events in a way that tries to simulate original event timing.
controller.PlayAllEventsAccordingToTimestamps();
```
## Processing events
[Events](xref:UnityEngine.InputSystem.LowLevel.InputEvent) are collected on a queue by the Unity runtime. This queue is regularly flushed out and the events on it processed. Events can be added to the queue manually by calling [`InputSystem.QueueEvent`](xref:UnityEngine.InputSystem.InputSystem.QueueEvent(UnityEngine.InputSystem.LowLevel.InputEventPtr)).
Each time input is processed, [`InputSystem.Update`](xref:UnityEngine.InputSystem.InputSystem.Update*) is called implicitly by the Unity runtime.
The interval at which this happens is determined by the ["Update Mode"](xref:input-system-settings#update-mode) configured in the settings. By default, input is processed in each frame __before__ `MonoBehaviour.Update` methods are called. If the setting is changed to process input in fixed updates, then this changes to input being processed each time before `MonoBehaviour.FixedUpdate` methods are called.
Normally, when input is processed, __all__ outstanding input events on the queue will be consumed. There are two exceptions to this, however.
When using [`UpdateMode.ProcessEventsInFixedUpdate`](xref:UnityEngine.InputSystem.InputSettings.UpdateMode.ProcessEventsInFixedUpdate), the Input System attempts to associate events with the timeslice of the corresponding `FixedUpdate`. This is based on the [timestamps](xref:UnityEngine.InputSystem.LowLevel.InputEvent.time) of the events and a "best effort" at calculating the corresponding timeslice of the current `FixedUpdate`.
The other exception is [`BeforeRender`](xref:UnityEngine.InputSystem.LowLevel.InputUpdateType.BeforeRender) updates. These updates run after fixed or dynamic updates but before rendering, and are used exclusively to update devices such as VR headsets that need the most up-to-date tracking data. Other input is not consumed in these updates, and they are only enabled if such devices are actually present. `BeforeRender` updates are not considered separate frames as far as input is concerned.
> [!NOTE]
> Manually calling [`InputSystem.Update`](xref:UnityEngine.InputSystem.InputSystem.Update*) is strongly advised against except within tests employing [`InputTestFixture`](xref:UnityEngine.InputSystem.InputTestFixture) or when explicitly setting the system to [manual update mode](xref:UnityEngine.InputSystem.InputSettings.UpdateMode.ProcessEventsManually).
Methods such as [`InputAction.WasPressedThisFrame`](xref:UnityEngine.InputSystem.InputAction.WasPressedThisFrame) and [`InputAction.WasPerformedThisFrame`](xref:UnityEngine.InputSystem.InputAction.WasPerformedThisFrame) operate implicitly based on the [`InputSystem.Update`](xref:UnityEngine.InputSystem.InputSystem.Update*) cadence described above, meaning that they refer to the state as of the __last__ fixed/dynamic/manual update.
You can query the [current/last update type](xref:UnityEngine.InputSystem.LowLevel.InputState.currentUpdateType) and [count](xref:UnityEngine.InputSystem.LowLevel.InputState.updateCount) from [`InputState`](xref:UnityEngine.InputSystem.LowLevel.InputState).
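For example (a minimal sketch):
```CSharp
// Log which type of input update ran last and how many input updates
// have run so far.
Debug.Log($"Last update: {InputState.currentUpdateType} (count: {InputState.updateCount})");
```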
### Merging of events
The Input System uses event merging to reduce the number of events that need to be processed.
This greatly improves performance when working with high-refresh-rate devices such as 8000 Hz mice, touchscreens, and others.
For example, let's take a stream of 7 mouse events coming in the same update:
```
Mouse Mouse Mouse Mouse Mouse Mouse Mouse
Event no1 Event no2 Event no3 Event no4 Event no5 Event no6 Event no7
Time 1 Time 2 Time 3 Time 4 Time 5 Time 6 Time 7
Pos(10,20) Pos(12,21) Pos(13,23) Pos(14,24) Pos(16,25) Pos(17,27) Pos(18,28)
Delta(1,1) Delta(2,1) Delta(1,2) Delta(1,1) Delta(2,1) Delta(1,2) Delta(1,1)
BtnLeft(0) BtnLeft(0) BtnLeft(0) BtnLeft(1) BtnLeft(1) BtnLeft(1) BtnLeft(1)
```
To reduce the workload, we can skip events that don't encode button state changes:
```
Mouse Mouse Mouse
Time 3 Time 4 Time 7
Event no3 Event no4 Event no7
Pos(13,23) Pos(14,24) Pos(18,28)
Delta(3,3) Delta(1,1) Delta(4,4)
BtnLeft(0) BtnLeft(1) BtnLeft(1)
```
In this case, we combine no1, no2, and no3 into no3 and accumulate the delta.
We keep no4 because it stores the transition from button released to button pressed,
and it's important to keep the exact timestamp of that transition.
Finally, we combine no5, no6, and no7 into no7 because it is the last event in the update.
Currently this approach is implemented for:
- `FastMouse`, combines events unless `buttons` or `clickCount` differ in `MouseState`.
- `Touchscreen`, combines events unless `touchId`, `phaseId` or `flags` differ in `TouchState`.
You can disable merging of events by:
```CSharp
InputSystem.settings.disableRedundantEventsMerging = true;
```
View File
@@ -0,0 +1,230 @@
---
uid: input-system-gamepad
---
# Gamepad Support
A [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad) is narrowly defined as a device with two thumbsticks, a D-pad, and four face buttons. Additionally, gamepads usually have two shoulder and two trigger buttons. Most gamepads also have two buttons in the middle.
A gamepad can have additional controls, such as a gyro, which the device can expose. However, all gamepads are guaranteed to have at least the minimum set of controls described above.
Gamepad support guarantees the correct location and functioning of controls across platforms and hardware. For example, a PS4 DualShock controller layout should look identical regardless of which platform it is supported on. A gamepad's south face button should always be the lowermost face button.
> [!NOTE]
> Generic [HID](xref:input-system-hid) gamepads will __not__ be surfaced as [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad) devices but rather be created as generic [joysticks](xref:input-system-joystick). This is because the Input System cannot guarantee correct mapping of buttons and axes on the controller (the information is simply not available at the HID level). Only HID gamepads that are explicitly supported by the Input System (like the PS4 controller) will come out as gamepads. Note that you can set up the same kind of support for specific HID gamepads yourself (see ["Overriding the HID Fallback"](xref:input-system-hid#creating-a-custom-device-layout)).
>
> In case you want to use the gamepad for driving mouse input, there is a sample called `Gamepad Mouse Cursor` you can install from the package manager UI when selecting the Input System package. The sample demonstrates how to set up gamepad input to drive a virtual mouse cursor.
## Controls
Every gamepad has the following controls:
|Control|Type|Description|
|-------|----|-----------|
|[`leftStick`](xref:UnityEngine.InputSystem.Gamepad.leftStick)|[`StickControl`](xref:UnityEngine.InputSystem.Controls.StickControl)|Thumbstick on the left side of the gamepad. Deadzoned. Provides a normalized 2D motion vector. X is [-1..1] from left to right, Y is [-1..1] from bottom to top. Has up/down/left/right buttons for use like a D-pad.|
|[`rightStick`](xref:UnityEngine.InputSystem.Gamepad.rightStick)|[`StickControl`](xref:UnityEngine.InputSystem.Controls.StickControl)|Thumbstick on the right side of the gamepad. Deadzoned. Provides a normalized 2D motion vector. X is [-1..1] from left to right, Y is [-1..1] from bottom to top. Has up/down/left/right buttons for use like a D-pad.|
|[`dpad`](xref:UnityEngine.InputSystem.Gamepad.dpad)|[`DpadControl`](xref:UnityEngine.InputSystem.Controls.DpadControl)|The D-pad on the gamepad.|
|[`buttonNorth`](xref:UnityEngine.InputSystem.Gamepad.buttonNorth)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The upper button of the four action buttons, which are usually located on the right side of the gamepad. Labelled "Y" on Xbox controllers and "Triangle" on PlayStation controllers.|
|[`buttonSouth`](xref:UnityEngine.InputSystem.Gamepad.buttonSouth)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The lower button of the four action buttons, which are usually located on the right side of the gamepad. Labelled "A" on Xbox controllers and "Cross" on PlayStation controllers.|
|[`buttonWest`](xref:UnityEngine.InputSystem.Gamepad.buttonWest)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The left button of the four action buttons, which are usually located on the right side of the gamepad. Labelled "X" on Xbox controllers and "Square" on PlayStation controllers.|
|[`buttonEast`](xref:UnityEngine.InputSystem.Gamepad.buttonEast)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The right button of the four action buttons, which are usually located on the right side of the gamepad. Labelled "B" on Xbox controllers and "Circle" on PlayStation controllers.|
|[`leftShoulder`](xref:UnityEngine.InputSystem.Gamepad.leftShoulder)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The left shoulder button.|
|[`rightShoulder`](xref:UnityEngine.InputSystem.Gamepad.rightShoulder)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The right shoulder button.|
|[`leftTrigger`](xref:UnityEngine.InputSystem.Gamepad.leftTrigger)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The left trigger button.|
|[`rightTrigger`](xref:UnityEngine.InputSystem.Gamepad.rightTrigger)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The right trigger button.|
|[`startButton`](xref:UnityEngine.InputSystem.Gamepad.startButton)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The start button.|
|[`selectButton`](xref:UnityEngine.InputSystem.Gamepad.selectButton)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The select button.|
|[`leftStickButton`](xref:UnityEngine.InputSystem.Gamepad.leftStickButton)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The button pressed when the user presses down the left stick.|
|[`rightStickButton`](xref:UnityEngine.InputSystem.Gamepad.rightStickButton)|[`ButtonControl`](xref:UnityEngine.InputSystem.Controls.ButtonControl)|The button pressed when the user presses down the right stick.|
> [!NOTE]
> Buttons are also full floating-point axes. For example, the left and right triggers can function as buttons as well as full floating-point axes.
You can also access gamepad buttons using the indexer property on [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad.Item(UnityEngine.InputSystem.LowLevel.GamepadButton)) and the [`GamepadButton`](xref:UnityEngine.InputSystem.LowLevel.GamepadButton) enumeration:
```CSharp
Gamepad.current[GamepadButton.LeftShoulder];
```
Gamepads have both Xbox-style and PS4-style aliases on buttons. For example, the following four accessors all retrieve the same "north" face button:
```CSharp
Gamepad.current[GamepadButton.Y]
Gamepad.current["Y"]
Gamepad.current[GamepadButton.Triangle]
Gamepad.current["Triangle"]
```
### Deadzones
Deadzones prevent accidental input due to slight variations in where gamepad sticks come to rest at their centre point. They allow a certain small inner area where the input is considered to be zero even if it is slightly off from the zero position.
To add a deadzone to a gamepad stick, put a [stick deadzone Processor](ProcessorTypes.md#stick-deadzone) on the sticks, like this:
```JSON
{
"name" : "MyGamepad",
"extend" : "Gamepad",
"controls" : [
{
"name" : "leftStick",
"processors" : "stickDeadzone(min=0.125,max=0.925)"
},
{
"name" : "rightStick",
"processors" : "stickDeadzone(min=0.125,max=0.925)"
}
]
}
```
You can do the same in your C# state structs.
```C#
public struct MyDeviceState
{
    [InputControl(processors = "stickDeadzone(min=0.125,max=0.925)")]
public StickControl leftStick;
    [InputControl(processors = "stickDeadzone(min=0.125,max=0.925)")]
public StickControl rightStick;
}
```
The gamepad layout already adds stick deadzone processors which take their min and max values from [`InputSettings.defaultDeadzoneMin`](xref:UnityEngine.InputSystem.InputSettings.defaultDeadzoneMin) and [`InputSettings.defaultDeadzoneMax`](xref:UnityEngine.InputSystem.InputSettings.defaultDeadzoneMax).
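For example, you can change these project-wide defaults from script (a minimal sketch):
```CSharp
// Adjust the default deadzone applied to gamepad sticks project-wide.
InputSystem.settings.defaultDeadzoneMin = 0.1f;
InputSystem.settings.defaultDeadzoneMax = 0.95f;
```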
## Polling
On Windows (XInput controllers only), Universal Windows Platform (UWP), and Switch, Unity polls gamepads explicitly rather than delivering updates as events.
The platform sets the default polling frequency to provide a good user experience for the devices supported on the platform. This frequency is guaranteed to be at least 60 Hz. You can override the polling frequency suggested by the target platform by explicitly setting [`InputSystem.pollingFrequency`](xref:UnityEngine.InputSystem.InputSystem.pollingFrequency) at runtime.
```CSharp
// Poll gamepads at 120 Hz.
InputSystem.pollingFrequency = 120;
```
Increased frequency should lead to an increased number of events on the respective devices. The timestamps provided on the events should roughly follow the spacing dictated by the polling frequency. Note, however, that the asynchronous background polling depends on OS thread scheduling and can vary.
## Rumble
The [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad) class implements the [`IDualMotorRumble`](xref:UnityEngine.InputSystem.Haptics.IDualMotorRumble) interface that allows you to control the left and right motor speeds. In most common gamepads, the left motor emits a low-frequency rumble, and the right motor emits a high-frequency rumble.
```CSharp
// Rumble the low-frequency (left) motor at 1/4 speed and the high-frequency
// (right) motor at 3/4 speed.
Gamepad.current.SetMotorSpeeds(0.25f, 0.75f);
```
> [!NOTE]
> Only the following combinations of devices/OSes currently support rumble:
>* PS4, Xbox, and Switch controllers, when connected to their respective consoles. Only supported if you install console-specific input packages in your Project.
>* PS4 controllers, when connected to Mac or Windows/UWP computers.
>* Xbox controllers on Windows.
[//]: # (TODO: are we missing any supported configs?)
### Pausing, resuming, and stopping haptics
[`IDualMotorRumble`](xref:UnityEngine.InputSystem.Haptics.IDualMotorRumble) is based on [`IHaptics`](xref:UnityEngine.InputSystem.Haptics.IHaptics), which is the base interface for any haptics support on any device. You can pause, resume, and reset haptic feedback using the [`PauseHaptics`](xref:UnityEngine.InputSystem.Haptics.IHaptics.PauseHaptics), [`ResumeHaptics`](xref:UnityEngine.InputSystem.Haptics.IHaptics.ResumeHaptics), and [`ResetHaptics`](xref:UnityEngine.InputSystem.Haptics.IHaptics.ResetHaptics) methods respectively.
In certain situations, you might want to globally pause or stop haptics for all devices. For example, if the player enters an in-game menu, you can pause haptics while the player is in the menu, and then resume haptics once the player resumes the game. You can use the corresponding methods on [`InputSystem`](xref:UnityEngine.InputSystem.InputSystem) to achieve this result. These methods work the same way as device-specific methods, but affect all devices:
```CSharp
// Pause haptics globally.
InputSystem.PauseHaptics();
// Resume haptics globally.
InputSystem.ResumeHaptics();
// Stop haptics globally.
InputSystem.ResetHaptics();
```
The difference between [`PauseHaptics`](xref:UnityEngine.InputSystem.InputSystem.PauseHaptics) and [`ResetHaptics`](xref:UnityEngine.InputSystem.InputSystem.ResetHaptics) is that the latter resets haptics playback state on each device to its initial state, whereas [`PauseHaptics`](xref:UnityEngine.InputSystem.InputSystem.PauseHaptics) preserves playback state in memory and only stops playback on the hardware.
## PlayStation controllers
PlayStation controllers are well supported on different devices. The Input System implements these as different derived types of the [`DualShockGamepad`](xref:UnityEngine.InputSystem.DualShock.DualShockGamepad) base class, which derives from [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad):
* [`DualShock3GamepadHID`](xref:UnityEngine.InputSystem.DualShock.DualShock3GamepadHID): A DualShock 3 controller connected to a desktop computer using the HID interface. Currently only supported on macOS. Doesn't support [rumble](#rumble).
* [`DualShock4GamepadHID`](xref:UnityEngine.InputSystem.DualShock.DualShock4GamepadHID): A DualShock 4 controller connected to a desktop computer using the HID interface. Supported on macOS, Windows, UWP, and Linux.
* [`DualSenseGamepadHID`](xref:UnityEngine.InputSystem.DualShock.DualSenseGamepadHID): A DualSense controller connected to a desktop computer using the HID interface. Supported on macOS and Windows.
* [`DualShock4GampadiOS`](xref:UnityEngine.InputSystem.iOS.DualShock4GampadiOS): A DualShock 4 controller connected to an iOS device via Bluetooth. Requires iOS 13 or higher.
The [`DualShockGamepad`](xref:UnityEngine.InputSystem.DualShock.DualShockGamepad) base class also provides [`SetLightBarColor(Color)`](xref:UnityEngine.InputSystem.DualShock.DualShockGamepad.SetLightBarColor(UnityEngine.Color)), which you can use to set the color of the light bar on the controller.
Note that, due to limitations in the USB driver and/or the hardware, only one IOCTL (input/output control) command can be serviced at a time. [`SetLightBarColor(Color)`](xref:UnityEngine.InputSystem.DualShock.DualShockGamepad.SetLightBarColor(UnityEngine.Color)) and [`SetMotorSpeeds(Single, Single)`](xref:UnityEngine.InputSystem.Gamepad.SetMotorSpeeds(System.Single,System.Single)) functionality on Dualshock 4 is implemented using IOCTL commands, and so if either method is called in quick succession, it is likely that only the first command will successfully complete. The other commands will be dropped. If there is a need to set both lightbar color and rumble motor speeds at the same time, use the [`SetMotorSpeedsAndLightBarColor(Single, Single, Color)`](xref:UnityEngine.InputSystem.DualShock.DualShock4GamepadHID.SetMotorSpeedsAndLightBarColor(System.Single,System.Single,UnityEngine.Color)) method.
> [!NOTE]
>* Unity supports PlayStation controllers on WebGL in some browser and OS configurations, but treats them as basic [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad) or [`Joystick`](xref:UnityEngine.InputSystem.Joystick) devices, and doesn't support rumble or any other DualShock-specific functionality.
>* Unity doesn't support connecting a PlayStation controller to a desktop machine using the DualShock 4 USB Wireless Adaptor. Use USB or Bluetooth to connect it.
## Xbox controllers
Xbox controllers are well supported on different devices. The Input System implements these using the [`XInputController`](xref:UnityEngine.InputSystem.XInput.XInputController) class, which derives from [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad). On Windows and UWP, Unity uses the XInput API to connect to any type of supported XInput controller, including all Xbox One or Xbox 360-compatible controllers. These controllers are represented as an [`XInputController`](xref:UnityEngine.InputSystem.XInput.XInputController) instance. You can query the [`XInputController.subType`](xref:UnityEngine.InputSystem.XInput.XInputController.subType) property to get information about the type of controller (for example, a wheel or a gamepad).
On other platforms, Unity uses derived classes to represent Xbox controllers:
* [`XboxGamepadMacOS`](xref:UnityEngine.InputSystem.XInput.XboxGamepadMacOS): Any Xbox or compatible gamepad connected to a Mac via USB using the [Xbox Controller Driver for macOS](https://github.com/360Controller/360Controller). This class is only used when the `360Controller` driver is in use, and as such you shouldn't see it in use on modern versions of macOS - it is provided primarily for legacy reasons, and for scenarios where macOS 10.15 may still be used.
* [`XboxGamepadMacOSNative`](xref:UnityEngine.InputSystem.XInput.XboxGamepadMacOSNative): Any Xbox gamepad connected to a Mac (macOS 11.0 or higher) via USB. On modern macOS versions, you will get this class instead of `XboxGamepadMacOS`.
* [`XboxOneGampadMacOSWireless`](xref:UnityEngine.InputSystem.XInput.XboxOneGampadMacOSWireless): An Xbox One controller connected to a Mac via Bluetooth. Only the latest generation of Xbox One controllers supports Bluetooth. These controllers don't require any additional drivers in this scenario.
* [`XboxOneGampadiOS`](xref:UnityEngine.InputSystem.iOS.XboxOneGampadiOS): An Xbox One controller connected to an iOS device via Bluetooth. Requires iOS 13 or higher.
> [!NOTE]
> * XInput controllers on Mac currently require the installation of the [Xbox Controller Driver for macOS](https://github.com/360Controller/360Controller). This driver only supports USB connections, and doesn't support wireless dongles. However, the latest generation of Xbox One controllers natively support Bluetooth. Macs natively support these controllers as HIDs without any additional drivers when connected via Bluetooth.
> * Unity supports Xbox controllers on WebGL in some browser and OS configurations, but treats them as basic [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad) or [`Joystick`](xref:UnityEngine.InputSystem.Joystick) devices, and doesn't support rumble or any other Xbox-specific functionality.
## Switch controllers
The Input System supports Switch Pro controllers on desktop computers via the [`SwitchProControllerHID`](../api/UnityEngine.InputSystem.Switch.SwitchProControllerHID.html) class, which implements basic gamepad functionality.
> [!NOTE]
> This support does not currently work for Switch Pro controllers connected via wired USB. Instead, the Switch Pro controller *must* be connected via Bluetooth. This is because the controller uses a proprietary communication protocol on top of HID, which does not allow treating the controller like any other HID.
> [!NOTE]
> Switch Joy-Cons are not currently supported on desktop.
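Because the controller is exposed as a regular [`Gamepad`](xref:UnityEngine.InputSystem.Gamepad), you read it through the standard gamepad controls. A minimal sketch (the class name is illustrative):
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;
using UnityEngine.InputSystem.Switch;

public class SwitchProExample : MonoBehaviour
{
    void Update()
    {
        // The Switch Pro controller is just a Gamepad with the standard control set.
        if (Gamepad.current is SwitchProControllerHID switchPro
            && switchPro.buttonEast.wasPressedThisFrame)
        {
            Debug.Log("A button pressed on a Switch Pro controller");
        }
    }
}
```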
## Cursor Control
To give gamepads and joysticks control over a hardware or software cursor, you can use the [`VirtualMouseInput`](xref:UnityEngine.InputSystem.UI.VirtualMouseInput) component. See [`VirtualMouseInput` component](xref:input-system-ui-support#virtual-mouse-cursor-control) in the UI section of the manual.
## Discover all connected devices
There are various ways to discover the currently connected devices, as shown in the code samples below.
To query a list of all connected devices (does not allocate; read-only access):
```CSharp
InputSystem.devices
```
To get notified when a device is added or removed:
```CSharp
InputSystem.onDeviceChange +=
    (device, change) =>
    {
        if (change == InputDeviceChange.Added || change == InputDeviceChange.Removed)
        {
            Debug.Log($"Device '{device}' was {change}");
        }
    };
```
To find all gamepads and joysticks:
```CSharp
var devices = InputSystem.devices;
for (var i = 0; i < devices.Count; ++i)
{
var device = devices[i];
if (device is Joystick || device is Gamepad)
{
Debug.Log("Found " + device);
}
}
```

View File

@@ -0,0 +1,318 @@
---
uid: input-system-hid
---
# HID Support
Human Interface Device (HID) is a [specification](https://www.usb.org/hid) to describe peripheral user input devices connected to computers via USB or Bluetooth. HID is commonly used to implement devices such as gamepads, joysticks, or racing wheels.
The Input System directly supports HID (connected via both USB and Bluetooth) on Windows, macOS, and the Universal Windows Platform (UWP). The system might support HID on other platforms, but not deliver input through HID-specific APIs. For example, on Linux, the system supports gamepad and joystick HIDs through SDL, but doesn't support other HIDs.
Every HID comes with a device descriptor. To browse through the descriptor of an HID from the Input Debugger, click the __HID Descriptor__ button in the device debugger window. The HID descriptor identifies the type of device by reporting entries from the [HID usage tables](https://www.usb.org/document-library/hid-usage-tables-112), and lists all controls on the device along with their data ranges and usages.
![HID Descriptor](Images/HIDDescriptor.png)
The Input System handles HIDs in one of two ways:
1. The system has a known layout for the specific HID.
2. If the system does not have a known layout, it auto-generates one for the HID.
## Auto-generated layouts
By default, the Input System creates layouts and device representations for any HID which reports its usage as `GenericDesktop/Joystick`, `GenericDesktop/Gamepad`, or `GenericDesktop/MultiAxisController` (see the [HID usage table specifications](https://www.usb.org/document-library/hid-usage-tables-112) for more information). To change the list of supported usages, set [`HIDSupport.supportedHIDUsages`](xref:UnityEngine.InputSystem.HID.HIDSupport.supportedHIDUsages).
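For example, a sketch that restricts HID support to joysticks and gamepads only (this assumes the 1.x API, where the property takes a `ReadOnlyArray` of `HIDSupport.HIDPageUsage` values; the `HidUsageSetup` class name is illustrative):
```CSharp
using UnityEngine;
using UnityEngine.InputSystem.HID;
using UnityEngine.InputSystem.Utilities;

public class HidUsageSetup : MonoBehaviour
{
    void Awake()
    {
        // Only create devices for HIDs that report the Joystick or Gamepad usage;
        // GenericDesktop/MultiAxisController devices will no longer be picked up.
        HIDSupport.supportedHIDUsages = new ReadOnlyArray<HIDSupport.HIDPageUsage>(new[]
        {
            new HIDSupport.HIDPageUsage(HID.GenericDesktop.Joystick),
            new HIDSupport.HIDPageUsage(HID.GenericDesktop.Gamepad),
        });
    }
}
```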
When the Input System automatically creates a layout for an HID, it always reports these Devices as [`Joysticks`](xref:input-system-joystick), represented by the [`Joystick` device class](xref:UnityEngine.InputSystem.Joystick). The first elements with a reported HID usage of `GenericDesktop/X` and `GenericDesktop/Y` together form the joystick's [`stick`](xref:UnityEngine.InputSystem.Joystick.stick) control. The system then adds Controls for all further HID axis or button elements, using the control names reported by the HID specification. The Input System assigns the first control with an HID usage of `Button/Button 1` to the joystick's [`trigger`](xref:UnityEngine.InputSystem.Joystick.trigger) control.
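For example, a minimal sketch reading such an auto-generated joystick (the class name is illustrative):
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;

public class GenericJoystickExample : MonoBehaviour
{
    void Update()
    {
        var joystick = Joystick.current;
        if (joystick == null)
            return;

        // 'stick' is built from the GenericDesktop/X and GenericDesktop/Y elements,
        // 'trigger' from the first Button/Button 1 element.
        Vector2 position = joystick.stick.ReadValue();
        if (joystick.trigger.wasPressedThisFrame)
            Debug.Log($"Trigger pressed at stick position {position}");
    }
}
```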
The auto-generated layouts represent a "best effort" on the part of the Input System. The way Human Interface Devices describe themselves in accordance with the HID standard is too ambiguous in practice, so generated layouts might lead to Controls that don't work as expected. For example, while the layout builder can identify hat switches and D-pads, it can often only make guesses as to which direction represents which. The same goes for individual buttons, which generally aren't assigned any meaning in HID.
The best way to resolve the situation of HIDs not working as expected is to add a custom layout, which bypasses auto-generation altogether. See [Creating a custom device layout](#creating-a-custom-device-layout) for details.
## HID output
HIDs can support output (for example, to toggle lights or force feedback motors on a gamepad). Unity controls output by sending HID Output Report commands to a device. Output reports use device-specific data formats. To use HID Output Reports, call [`InputDevice.ExecuteCommand`](xref:UnityEngine.InputSystem.InputDevice.ExecuteCommand``1(``0@)) to send a command struct with the [`typeStatic`](xref:UnityEngine.InputSystem.LowLevel.IInputDeviceCommandInfo.typeStatic) property set as `"HIDO"` to a device. The command struct contains the device-specific data sent out to the HID.
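As a rough sketch of what this can look like, the command struct below uses an entirely made-up output report layout (report ID and payload are hypothetical; a real device defines its own format, and in practice you would use the package's base-command-size constant instead of the literal 8-byte header offset assumed here):
```CSharp
using System.Runtime.InteropServices;
using UnityEngine.InputSystem.LowLevel;
using UnityEngine.InputSystem.Utilities;

// Hypothetical output report: an 8-byte InputDeviceCommand header (FourCC type + size)
// followed by a device-specific payload.
[StructLayout(LayoutKind.Explicit, Size = 10)]
internal struct MyHIDOutputReport : IInputDeviceCommandInfo
{
    public static FourCC Type => new FourCC('H', 'I', 'D', 'O');
    public FourCC typeStatic => Type;

    [FieldOffset(0)] public InputDeviceCommand baseCommand;
    [FieldOffset(8)] public byte reportId;   // Hypothetical report ID.
    [FieldOffset(9)] public byte ledState;   // Hypothetical payload byte.

    public static MyHIDOutputReport Create(byte ledState)
    {
        return new MyHIDOutputReport
        {
            baseCommand = new InputDeviceCommand(Type, 10),
            reportId = 1,
            ledState = ledState,
        };
    }
}
```
Sending the command to a device then looks like this:
```CSharp
// 'device' is the InputDevice you want to send the output report to.
var command = MyHIDOutputReport.Create(ledState: 1);
device.ExecuteCommand(ref command);
```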
## Creating a custom device layout
Often, when using the layouts auto-generated for HIDs, the result isn't ideal. Controls don't receive proper names specific to the device, some Controls might not work as expected, and some Controls that use vendor-specific formats might not appear at all.
The best way to deal with this is to override the HID fallback and set up a custom device layout specifically for your device. This overrides the default auto-generation and gives you full control over how the device is exposed.
Below are three example workflows showing different ways to achieve this.
- [Example 1 - Use an existing C# InputDevice](#custom-device-workflow-example-1---use-an-existing-c-inputdevice)
- [Example 2 - Create your own InputDevice class](#custom-device-workflow-example-2---create-your-own-inputdevice-class)
- [Example 3 - A more complex example using the PS4 DualShock Controller](#custom-device-workflow-example-3---ps4-dualshock-controller)
### Custom Device Workflow Example 1 - Use an existing C# InputDevice
If you want to use one of the existing C# [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) classes in code to interface with your device, you can build on an existing layout using JSON:
```
{
"name" : "MyDevice",
"extend" : "Gamepad", // Or some other thing
"controls" : [
{
"name" : "firstButton",
"layout" : "Button",
"offset" : 0,
"bit": 0,
"format" : "BIT",
},
{
"name" : "secondButton",
"layout" : "Button",
"offset" : 0,
"bit": 1,
"format" : "BIT",
},
{
"name" : "axis",
"layout" : "Axis",
"offset" : 4,
"format" : "FLT",
"parameters" : "clamp=true,clampMin=0,clampMax=1"
}
]
}
```
You then register your layout with the system and instantiate it:
```C#
InputSystem.RegisterControlLayout(myDeviceJson);
var device = InputSystem.AddDevice("MyDevice");
```
### Custom Device Workflow Example 2 - Create your own InputDevice class
Alternatively, you can create your own [`InputDevice`](xref:UnityEngine.InputSystem.InputDevice) class and state layouts in C#.
```C#
public struct MyDeviceState : IInputStateTypeInfo
{
// FourCC type codes are used to identify the memory layouts of state blocks.
public FourCC format => new FourCC('M', 'D', 'E', 'V');
[InputControl(name = "firstButton", layout = "Button", bit = 0)]
[InputControl(name = "secondButton", layout = "Button", bit = 1)]
public int buttons;
[InputControl(layout = "Analog", parameters="clamp=true,clampMin=0,clampMax=1")]
public float axis;
}
[InputState(typeof(MyDeviceState))]
public class MyDevice : InputDevice
{
public ButtonControl firstButton { get; private set; }
public ButtonControl secondButton { get; private set; }
public AxisControl axis { get; private set; }
protected override void FinishSetup(InputControlSetup setup)
{
firstButton = setup.GetControl<ButtonControl>(this, "firstButton");
secondButton = setup.GetControl<ButtonControl>(this, "secondButton");
axis = setup.GetControl<AxisControl>(this, "axis");
base.FinishSetup(setup);
}
}
```
To create an instance of your device, register it as a layout and then instantiate it:
```C#
InputSystem.RegisterControlLayout("MyDevice", typeof(MyDevice));
InputSystem.AddDevice("MyDevice");
```
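Once added, the device can be read like any other; for example, a small sketch using the `MyDevice` class defined above:
```CSharp
// Look up the device instance that was added above and read one of its controls.
var myDevice = InputSystem.GetDevice<MyDevice>();
if (myDevice != null && myDevice.firstButton.wasPressedThisFrame)
    Debug.Log("firstButton was pressed on MyDevice");
```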
### Custom Device Workflow Example 3 - PS4 DualShock Controller
This example workflow uses the same technique as the previous example, but provides more detail by using the PS4 DualShock controller as a more complex device to set up.
The following example assumes that the Input System doesn't already have a custom layout for the PS4 DualShock controller, and that you want to add such a layout.
In this example, you want to expose the controller as a [`Gamepad`](xref:input-system-gamepad) and you roughly know the HID data format used by the device.
> [!TIP]
> If you don't know the format of a given HID you want to support, you can open the Input Debugger with the device plugged in and open both the debugger view for the device and the window showing the HID descriptor. Then, you can go through the Controls one by one, see what happens in the debug view, and correlate that to the Controls in the HID descriptor. You can also double-click individual events and compare the raw data coming in from the device. If you select two events in the event trace, you can then right-click them and choose __Compare__ to open a window that shows only the differences between the two events.
#### Step 1: The state struct
The first step is to describe in detail the format in which input data for the device arrives, as well as the [`InputControl`](xref:UnityEngine.InputSystem.InputControl) instances that should read out individual pieces of information from that data.
The HID input reports from the PS4 controller look approximately like this:
```C++
struct PS4InputReport
{
byte reportId; // #0
byte leftStickX; // #1
byte leftStickY; // #2
byte rightStickX; // #3
byte rightStickY; // #4
byte dpad : 4; // #5 bit #0 (0=up, 2=right, 4=down, 6=left)
byte squareButton : 1; // #5 bit #4
byte crossButton : 1; // #5 bit #5
byte circleButton : 1; // #5 bit #6
byte triangleButton : 1; // #5 bit #7
byte leftShoulder : 1; // #6 bit #0
byte rightShoulder : 1; // #6 bit #1
byte leftTriggerButton : 1; // #6 bit #2
byte rightTriggerButton : 1;// #6 bit #3
byte shareButton : 1; // #6 bit #4
byte optionsButton : 1; // #6 bit #5
byte leftStickPress : 1; // #6 bit #6
byte rightStickPress : 1; // #6 bit #7
byte psButton : 1; // #7 bit #0
byte touchpadPress : 1; // #7 bit #1
byte padding : 6;
byte leftTrigger; // #8
byte rightTrigger; // #9
}
```
You can translate this into a C# struct:
```CSharp
// We receive data as raw HID input reports. This struct
// describes the raw binary format of such a report.
[StructLayout(LayoutKind.Explicit, Size = 32)]
struct DualShock4HIDInputReport : IInputStateTypeInfo
{
// Because all HID input reports are tagged with the 'HID ' FourCC,
// this is the format we need to use for this state struct.
public FourCC format => new FourCC('H', 'I', 'D');
// HID input reports can start with an 8-bit report ID. It depends on the device
// whether this is present or not. On the PS4 DualShock controller, it is
// present. We don't really need to add the field, but let's do so for the sake of
// completeness. This can also help with debugging.
[FieldOffset(0)] public byte reportId;
// The InputControl annotations here probably look a little scary, but what we do
// here is relatively straightforward. The fields we add we annotate with
// [FieldOffset] to force them to the right location, and then we add InputControl
// to attach controls to the fields. Each InputControl attribute can only do one of
// two things: either it adds a new control or it modifies an existing control.
// Given that our layout is based on Gamepad, almost all the controls here are
// inherited from Gamepad, and we just modify settings on them.
[InputControl(name = "leftStick", layout = "Stick", format = "VC2B")]
[InputControl(name = "leftStick/x", offset = 0, format = "BYTE",
parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5")]
[InputControl(name = "leftStick/left", offset = 0, format = "BYTE",
parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0,clampMax=0.5,invert")]
[InputControl(name = "leftStick/right", offset = 0, format = "BYTE",
parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0.5,clampMax=1")]
[InputControl(name = "leftStick/y", offset = 1, format = "BYTE",
parameters = "invert,normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5")]
[InputControl(name = "leftStick/up", offset = 1, format = "BYTE",
parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0,clampMax=0.5,invert")]
[InputControl(name = "leftStick/down", offset = 1, format = "BYTE",
parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0.5,clampMax=1,invert=false")]
[FieldOffset(1)] public byte leftStickX;
[FieldOffset(2)] public byte leftStickY;
[InputControl(name = "rightStick", layout = "Stick", format = "VC2B")]
[InputControl(name = "rightStick/x", offset = 0, format = "BYTE", parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5")]
[InputControl(name = "rightStick/left", offset = 0, format = "BYTE", parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0,clampMax=0.5,invert")]
[InputControl(name = "rightStick/right", offset = 0, format = "BYTE", parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0.5,clampMax=1")]
[InputControl(name = "rightStick/y", offset = 1, format = "BYTE", parameters = "invert,normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5")]
[InputControl(name = "rightStick/up", offset = 1, format = "BYTE", parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0,clampMax=0.5,invert")]
[InputControl(name = "rightStick/down", offset = 1, format = "BYTE", parameters = "normalize,normalizeMin=0,normalizeMax=1,normalizeZero=0.5,clamp,clampMin=0.5,clampMax=1,invert=false")]
[FieldOffset(3)] public byte rightStickX;
[FieldOffset(4)] public byte rightStickY;
[InputControl(name = "dpad", format = "BIT", layout = "Dpad", sizeInBits = 4, defaultState = 8)]
[InputControl(name = "dpad/up", format = "BIT", layout = "DiscreteButton", parameters = "minValue=7,maxValue=1,nullValue=8,wrapAtValue=7", bit = 0, sizeInBits = 4)]
[InputControl(name = "dpad/right", format = "BIT", layout = "DiscreteButton", parameters = "minValue=1,maxValue=3", bit = 0, sizeInBits = 4)]
[InputControl(name = "dpad/down", format = "BIT", layout = "DiscreteButton", parameters = "minValue=3,maxValue=5", bit = 0, sizeInBits = 4)]
[InputControl(name = "dpad/left", format = "BIT", layout = "DiscreteButton", parameters = "minValue=5, maxValue=7", bit = 0, sizeInBits = 4)]
[InputControl(name = "buttonWest", displayName = "Square", bit = 4)]
[InputControl(name = "buttonSouth", displayName = "Cross", bit = 5)]
[InputControl(name = "buttonEast", displayName = "Circle", bit = 6)]
[InputControl(name = "buttonNorth", displayName = "Triangle", bit = 7)]
[FieldOffset(5)] public byte buttons1;
[InputControl(name = "leftShoulder", bit = 0)]
[InputControl(name = "rightShoulder", bit = 1)]
[InputControl(name = "leftTriggerButton", layout = "Button", bit = 2)]
[InputControl(name = "rightTriggerButton", layout = "Button", bit = 3)]
[InputControl(name = "select", displayName = "Share", bit = 4)]
[InputControl(name = "start", displayName = "Options", bit = 5)]
[InputControl(name = "leftStickPress", bit = 6)]
[InputControl(name = "rightStickPress", bit = 7)]
[FieldOffset(6)] public byte buttons2;
[InputControl(name = "systemButton", layout = "Button", displayName = "System", bit = 0)]
[InputControl(name = "touchpadButton", layout = "Button", displayName = "Touchpad Press", bit = 1)]
[FieldOffset(7)] public byte buttons3;
[InputControl(name = "leftTrigger", format = "BYTE")]
[FieldOffset(8)] public byte leftTrigger;
[InputControl(name = "rightTrigger", format = "BYTE")]
[FieldOffset(9)] public byte rightTrigger;
[FieldOffset(30)] public byte batteryLevel;
}
```
#### Step 2: The InputDevice
Next, you need an `InputDevice` to represent your device. Because you're dealing with a gamepad, you must create a new subclass of `Gamepad`.
For simplicity, this example ignores the fact that there is a `DualShockGamepad` class that the actual `DualShockGamepadHID` is based on.
```CSharp
// Using InputControlLayoutAttribute, we tell the system about the state
// struct we created, which includes where to find all the InputControl
// attributes that we placed on there. This is how the Input System knows
// what controls to create and how to configure them.
[InputControlLayout(stateType = typeof(DualShock4HIDInputReport))]
public class DualShock4GamepadHID : Gamepad
{
}
```
#### Step 3: Registering the device
The last step is to register your new type of device and set up the Input System so that when a PS4 controller is connected, the Input System generates your custom device instead of using the default HID fallback.
This only requires a call to [`InputSystem.RegisterLayout<T>`](xref:UnityEngine.InputSystem.InputSystem.RegisterLayout``1(System.String,System.Nullable{UnityEngine.InputSystem.Layouts.InputDeviceMatcher})), giving it an [`InputDeviceMatcher`](xref:UnityEngine.InputSystem.Layouts.InputDeviceMatcher) that matches the description for a PS4 DualShock HID. In theory, you can place this call anywhere, but the best point for registering layouts is generally during startup. Doing so ensures that your custom layout is visible to the Unity Editor and therefore exposed, for example, in the Input Control picker.
You can insert your registration into the startup sequence by modifying the code for your `DualShock4GamepadHID` device as follows:
```CSharp
[InputControlLayout(stateType = typeof(DualShock4HIDInputReport))]
#if UNITY_EDITOR
[InitializeOnLoad] // Make sure static constructor is called during startup.
#endif
public class DualShock4GamepadHID : Gamepad
{
static DualShock4GamepadHID()
{
// This is one way to match the device.
InputSystem.RegisterLayout<DualShock4GamepadHID>(
new InputDeviceMatcher()
.WithInterface("HID")
.WithManufacturer("Sony.+Entertainment")
.WithProduct("Wireless Controller"));
// Alternatively, you can also match by PID and VID, which is generally
// more reliable for HIDs.
InputSystem.RegisterLayout<DualShock4GamepadHID>(
matches: new InputDeviceMatcher()
.WithInterface("HID")
.WithCapability("vendorId", 0x54C) // Sony Entertainment.
.WithCapability("productId", 0x9CC)); // Wireless controller.
}
// In the Player, to trigger the calling of the static constructor,
// create an empty method annotated with RuntimeInitializeOnLoadMethod.
[RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.BeforeSceneLoad)]
static void Init() {}
}
```
Your custom layout now picks up any device whose HID descriptor matches the manufacturer and product name strings, or the vendor and product IDs. The Input System represents such a device as a `DualShock4GamepadHID` instance.
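If you want to verify at runtime that your layout is being picked up, a small sketch using the device-change callback shown earlier in this documentation can help:
```CSharp
InputSystem.onDeviceChange += (device, change) =>
{
    if (change == InputDeviceChange.Added && device is DualShock4GamepadHID)
        Debug.Log($"Custom DualShock 4 layout used for '{device}'");
};
```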
For more information, refer to [Device matching](xref:input-system-devices#matching).

View File

@@ -0,0 +1,55 @@
---
uid: input-system-how-do-i
---
# How do I…?
A collection of frequently asked questions, and where to find their answers in the documentation.
> [!NOTE]
> If you're new to the Input System and have landed on this page looking for documentation, it's best to read the [QuickStart Guide](xref:input-system-quickstart), and the [Concepts](xref:basic-concepts) and [Workflows](xref:input-system-workflows) pages, so that you can make sure you're choosing the best workflow for your project's input requirements.
>
> This is because there are a number of different ways to read input using the Input System, and many of the answers on this page give you the quickest but least flexible solution, which may not be suitable for a project with more complex requirements.
How do I...?
- [check if a specific key or button was pressed this frame?](xref:UnityEngine.InputSystem.Controls.ButtonControl.wasPressedThisFrame)
- [check if any key or button was pressed](xref:UnityEngine.InputSystem.InputSystem.onAnyButtonPress)
- [find all connected gamepads?](xref:UnityEngine.InputSystem.Gamepad.all)
- [find the gamepad that the player is currently using?](xref:UnityEngine.InputSystem.Gamepad.current)
- [know when a new device was plugged in?](xref:input-system-devices#monitoring-devices)
- [create my own custom devices?](xref:input-system-hid#creating-a-custom-device-layout)
- create a simple "Fire" type action?<br/>
Use the same techniques shown for the "Jump" action in the [Workflows section](xref:input-system-workflows).
- [require a button to be held down for some duration before triggering an action?](Interactions.html#hold)
- [use a "positive" and a "negative" button to drive an axis?](ActionBindings.html#1d-axis)
- [create a UI to rebind input in my game?](xref:UnityEngine.InputSystem.InputActionRebindingExtensions)
- [set up an Action to specifically target the left-hand XR controller?](xref:UnityEngine.InputSystem.XR.XRController.leftHand)
- [make my left-hand XR controller my right-hand one?](xref:UnityEngine.InputSystem.XR.XRController.leftHand)
- [get all current touches from the touchscreen?](xref:input-system-touch#reading-all-touches)
- [deal with my gamepad data arriving in a format different from `GamepadState`?](xref:UnityEngine.InputSystem.LowLevel.GamepadState)
- [force the Input System to use my own layout when the native backend discovers a specific Device?](xref:input-system-devices#native-devices)
- [add deadzoning to my gamepad sticks?](xref:input-system-gamepad#deadzones)
- [give my head tracking an extra update before rendering?](xref:UnityEngine.InputSystem.XR.XRHMD)
- [record events flowing through the system?](xref:input-system-debugging#other-tips)
- [see events as they're processed?](xref:input-system-debugging#other-tips)
- [see what Devices I have and what state they're in?](Debugging.html#debugging-devices)

View File

@@ -0,0 +1,84 @@
---
uid: input-system-processors
---
# Apply Processors
The following are the three different ways of applying Processors to input events. For more information on the general usage of Processors, refer to [Using Processors](UsingProcessors.md).
* [Processors on Bindings](#processors-on-bindings)
* [Processors on Actions](#processors-on-actions)
* [Processors on Controls](#processors-on-controls)
## Processors on Bindings
When you create Bindings for your [actions](xref:input-system-actions), you can choose to add Processors to the Bindings. These process the values from the controls they bind to, before the system applies them to the Action value. For instance, you might want to invert the `Vector2` values from the controls along the Y-axis before passing these values to the Action that drives the input logic for your application. To do this, you can add an [Invert Vector2](ProcessorTypes.md#invert-vector-2) Processor to your Binding.
If you're using Actions defined in the [Input Actions Editor](xref:input-system-configuring-input), or in an [Action Asset](xref:input-system-action-assets), you can add any Processor to your Bindings in the Input Action editor:
1. Select the Binding you want to add Processors to so that the Binding Properties panel shows up on the right side.
2. Select the **Add (+)** icon on the __Processors__ foldout to open a list of all available Processors that match your control type.
3. Choose a Processor type to add a Processor instance of that type. The Processor now appears under the __Processors__ foldout.
4. (Optional) If the Processor has any parameters, you can edit them in the __Processors__ foldout.
![Binding Processors](Images/BindingProcessors.png)
To remove a Processor, click the Remove (-) icon next to it. You can also use the up and down arrows to change the order of Processors. This affects the order in which the system processes values.
If you create your Bindings in code, you can add Processors like this:
```CSharp
var action = new InputAction();
action.AddBinding("<Gamepad>/leftStick")
.WithProcessor("invertVector2(invertX=false)");
```
## Processors on Actions
Processors on Actions work in the same way as Processors on Bindings, but they affect all controls bound to an Action, rather than just the controls from a specific Binding. If there are Processors on both the Binding and the Action, the system processes the ones from the Binding first.
You can add and edit Processors on Actions in the [Input Actions Editor](xref:input-system-configuring-input) or in an [Action Asset](xref:input-system-action-assets), in the [same way](#processors-on-bindings) as you would for Bindings: select an Action to edit, then add one or more Processors in the right window pane.
If you create your Actions in code, you can add Processors like this:
```CSharp
var action = new InputAction(processors: "invertVector2(invertX=false)");
```
## Processors on Controls
You can have any number of Processors directly on an [`InputControl`](xref:UnityEngine.InputSystem.InputControl), which then process the values read from the Control. Whenever you call [`ReadValue`](xref:UnityEngine.InputSystem.InputControl`1.ReadValue) on a Control, all Processors on that Control process the value before it gets returned to you. You can use [`ReadUnprocessedValue`](xref:UnityEngine.InputSystem.InputControl`1.ReadUnprocessedValue) on a Control to bypass the Processors.
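For example, a minimal sketch comparing the processed and unprocessed value of a gamepad stick (the class name is illustrative):
```CSharp
using UnityEngine;
using UnityEngine.InputSystem;

public class StickProcessorComparison : MonoBehaviour
{
    void Update()
    {
        var gamepad = Gamepad.current;
        if (gamepad == null)
            return;

        // Processed: the Stick Deadzone Processor (and any other Processors
        // on the Control) have been applied.
        Vector2 processed = gamepad.leftStick.ReadValue();

        // Unprocessed: the raw value from state memory, bypassing all Processors.
        Vector2 raw = gamepad.leftStick.ReadUnprocessedValue();

        Debug.Log($"processed={processed} raw={raw}");
    }
}
```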
The Input System adds Processors to a Control during device creation, if they're specified in the Control's [layout](xref:input-system-layouts). You can't add Processors to existing Controls after they've been created, so you can only add Processors to Controls when you're [creating custom devices](xref:input-system-devices#creating-custom-devices). The devices that the Input System supports out of the box already have some useful Processors added on their Controls. For instance, sticks on gamepads have a [Stick Deadzone](ProcessorTypes.md#stick-deadzone) Processor.
If you're using a layout generated by the Input System from a [state struct](xref:input-system-devices#step-1-the-state-struct) using [`InputControlAttributes`](xref:UnityEngine.InputSystem.Layouts.InputControlAttribute), you can specify the Processors you want to use via the [`processors`](xref:UnityEngine.InputSystem.Layouts.InputControlAttribute.processors) property of the attribute, like this:
```CSharp
public struct MyDeviceState : IInputStateTypeInfo
{
public FourCC format => new FourCC('M', 'Y', 'D', 'V');
// Add an axis deadzone to the Control to ignore values
// smaller than 0.2, as our Control does not have a stable
// resting position.
[InputControl(layout = "Axis", processors = "AxisDeadzone(min=0.2)")]
public short axis;
}
```
If you [create a layout from JSON](xref:input-system-layouts#layout-from-json), you can specify Processors on your Controls like this:
```
{
"name" : "MyDevice",
"extend" : "Gamepad", // Or some other thing
"controls" : [
{
"name" : "axis",
"layout" : "Axis",
"offset" : 4,
"format" : "FLT",
"processors" : "AxisDeadzone(min=0.2)"
}
]
}
```
