
PlayerBrain.cs 3.9KB

using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Serialization;

namespace MLAgents
{
    /// <summary>
    /// Implementation of the Player Brain. Inherits from the base class Brain. Allows the user to
    /// manually select decisions for linked agents by creating a mapping from key presses to
    /// actions.
    /// You can use Player Brains to control a "teacher" Agent that trains other Agents during
    /// imitation learning. You can also use Player Brains to test your Agents and environment
    /// before training agents with reinforcement learning.
    /// </summary>
    [CreateAssetMenu(fileName = "NewPlayerBrain", menuName = "ML-Agents/Player Brain")]
    public class PlayerBrain : Brain
    {
        [System.Serializable]
        public struct DiscretePlayerAction
        {
            public KeyCode key;
            public int branchIndex;
            public int value;
        }

        [System.Serializable]
        public struct KeyContinuousPlayerAction
        {
            public KeyCode key;
            public int index;
            public float value;
        }

        [System.Serializable]
        public struct AxisContinuousPlayerAction
        {
            public string axis;
            public int index;
            public float scale;
        }

        [SerializeField]
        [FormerlySerializedAs("continuousPlayerActions")]
        [Tooltip("The list of keys and the value they correspond to for continuous control.")]
        /// Contains the mapping from key presses to continuous actions
        public KeyContinuousPlayerAction[] keyContinuousPlayerActions;

        [SerializeField]
        [Tooltip("The list of axis actions.")]
        /// Contains the mapping from input axes to continuous actions
        public AxisContinuousPlayerAction[] axisContinuousPlayerActions;

        [SerializeField]
        [Tooltip("The list of keys and the value they correspond to for discrete control.")]
        /// Contains the mapping from key presses to discrete actions
        public DiscretePlayerAction[] discretePlayerActions;

        protected override void Initialize() { }

        /// Uses the continuous or discrete inputs of the player to decide the action
        protected override void DecideAction()
        {
            if (brainParameters.vectorActionSpaceType == SpaceType.continuous)
            {
                foreach (Agent agent in agentInfos.Keys)
                {
                    var action = new float[brainParameters.vectorActionSize[0]];
                    // Key-based continuous actions: write the mapped value into the
                    // corresponding action element while the key is held down.
                    foreach (KeyContinuousPlayerAction cha in keyContinuousPlayerActions)
                    {
                        if (Input.GetKey(cha.key))
                        {
                            action[cha.index] = cha.value;
                        }
                    }
                    // Axis-based continuous actions: scale the axis value and ignore
                    // near-zero input so idle axes do not overwrite key-based actions.
                    foreach (AxisContinuousPlayerAction axisAction in axisContinuousPlayerActions)
                    {
                        var axisValue = Input.GetAxis(axisAction.axis);
                        axisValue *= axisAction.scale;
                        if (Mathf.Abs(axisValue) > 0.0001f)
                        {
                            action[axisAction.index] = axisValue;
                        }
                    }
                    agent.UpdateVectorAction(action);
                }
            }
            else
            {
                foreach (Agent agent in agentInfos.Keys)
                {
                    // Discrete control: one action element per branch.
                    var action = new float[brainParameters.vectorActionSize.Length];
                    foreach (DiscretePlayerAction dha in discretePlayerActions)
                    {
                        if (Input.GetKey(dha.key))
                        {
                            action[dha.branchIndex] = (float) dha.value;
                        }
                    }
                    agent.UpdateVectorAction(action);
                }
            }
            agentInfos.Clear();
        }
    }
}
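
A minimal sketch (not part of PlayerBrain.cs above) of how the public mapping arrays might be populated from another script. The key bindings, indices, and values are hypothetical and assume a Brain configured with a matching action space; only the fields and nested structs declared in the file above are used.

using UnityEngine;
using MLAgents;

public static class PlayerBrainSetupExample
{
    public static void Configure(PlayerBrain brain)
    {
        // Continuous control: holding W/S writes +1/-1 into action element 0.
        brain.keyContinuousPlayerActions = new[]
        {
            new PlayerBrain.KeyContinuousPlayerAction { key = KeyCode.W, index = 0, value = 1f },
            new PlayerBrain.KeyContinuousPlayerAction { key = KeyCode.S, index = 0, value = -1f }
        };

        // Continuous control: the "Horizontal" axis is scaled and written to action element 1.
        brain.axisContinuousPlayerActions = new[]
        {
            new PlayerBrain.AxisContinuousPlayerAction { axis = "Horizontal", index = 1, scale = 1f }
        };

        // Discrete control: pressing Space writes value 1 into branch 0.
        brain.discretePlayerActions = new[]
        {
            new PlayerBrain.DiscretePlayerAction { key = KeyCode.Space, branchIndex = 0, value = 1 }
        };
    }
}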