diff --git a/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockAgentGridCollab.prefab b/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockAgentGridCollab.prefab index a268aa2fbe..26308e1b86 100644 --- a/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockAgentGridCollab.prefab +++ b/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockAgentGridCollab.prefab @@ -9,7 +9,7 @@ GameObject: serializedVersion: 6 m_Component: - component: {fileID: 2709359580712052713} - - component: {fileID: 2709359580712052712} + - component: {fileID: 1548337883655231979} m_Layer: 0 m_Name: GridSensor m_TagString: Untagged @@ -31,7 +31,7 @@ Transform: m_Father: {fileID: 2708762399863795223} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &2709359580712052712 +--- !u!114 &1548337883655231979 MonoBehaviour: m_ObjectHideFlags: 0 m_CorrespondingSourceObject: {fileID: 0} @@ -40,44 +40,36 @@ MonoBehaviour: m_GameObject: {fileID: 2709359580712052714} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 801669c0cdece6b40b2e741ad0b119ac, type: 3} + m_Script: {fileID: 11500000, guid: 2a501962d056745d1a30e99146ee39fe, type: 3} m_Name: m_EditorClassIdentifier: - Name: AgentGrid - CellScaleX: 1 - CellScaleZ: 1 - GridNumSideX: 20 - GridNumSideZ: 20 - CellScaleY: 0.5 - RotateToAgent: 1 - ChannelDepth: 07000000 - DetectableObjects: + m_SensorName: GridSensor + m_CellScale: {x: 1, y: 0.5, z: 1} + m_GridSize: {x: 20, y: 1, z: 20} + m_RotateWithAgent: 1 + m_DetectableTags: - wall - agent - goal - blockSmall - blockLarge - blockVeryLarge - ObserveMask: + m_ColliderMask: serializedVersion: 2 m_Bits: 1 - gridDepthType: 1 - rootReference: {fileID: 2710286047221272849} - MaxColliderBufferSize: 500 - InitialColliderBufferSize: 16 - ObservationPerCell: 7 - NumberOfObservations: 2800 - ChannelOffsets: 00000000 - DebugColors: + m_MaxColliderBufferSize: 500 + m_InitialColliderBufferSize: 16 + m_DebugColors: - {r: 0, g: 0, b: 0, a: 0} - - {r: 0, g: 0.51824737, b: 1, a: 1} - - {r: 0.4680206, g: 0.7058824, b: 0.35155708, a: 1} - - {r: 1, g: 0.99570733, b: 0.984, a: 1} - - {r: 0.4811321, g: 0.4811321, b: 0.4811321, a: 1} - - {r: 0.3584906, g: 0.3584906, b: 0.3584906, a: 0} - GizmoYOffset: 0 - ShowGizmos: 0 - CompressionType: 1 + - {r: 0, g: 0.5176471, b: 1, a: 0} + - {r: 0.46666667, g: 0.7058824, b: 0.3529412, a: 0} + - {r: 1, g: 0.99607843, b: 0.9843137, a: 0} + - {r: 0.48235294, g: 0.48235294, b: 0.48235294, a: 0} + - {r: 0.35686275, g: 0.35686275, b: 0.35686275, a: 0} + m_GizmoYOffset: 0 + m_ShowGizmos: 0 + m_CompressionType: 1 + m_ObservationStacks: 1 --- !u!1 &2709573194145405553 GameObject: m_ObjectHideFlags: 0 @@ -132,6 +124,7 @@ MeshRenderer: m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: @@ -143,6 +136,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 + m_ReceiveGI: 1 m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 @@ -209,6 +203,7 @@ MeshRenderer: m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: @@ -220,6 +215,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 + m_ReceiveGI: 1 m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 @@ -286,6 +282,7 @@ MeshRenderer: m_MotionVectors: 1 m_LightProbeUsage: 1 
m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: @@ -297,6 +294,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 + m_ReceiveGI: 1 m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 @@ -532,6 +530,7 @@ MeshRenderer: m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: @@ -543,6 +542,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 + m_ReceiveGI: 1 m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 @@ -609,6 +609,7 @@ MeshRenderer: m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 + m_RayTracingMode: 2 m_RenderingLayerMask: 1 m_RendererPriority: 0 m_Materials: @@ -620,6 +621,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 + m_ReceiveGI: 1 m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 diff --git a/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockCollabAreaGrid.prefab b/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockCollabAreaGrid.prefab index 7331ebe7ad..c0bbee555e 100644 --- a/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockCollabAreaGrid.prefab +++ b/Project/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockCollabAreaGrid.prefab @@ -1486,47 +1486,6 @@ MonoBehaviour: onTriggerExitEvent: m_PersistentCalls: m_Calls: [] ---- !u!114 &1809664679221531284 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8191066182862526894} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2a501962d056745d1a30e99146ee39fe, type: 3} - m_Name: - m_EditorClassIdentifier: - m_SensorName: GridSensor - m_CellScale: {x: 1, y: 0.01, z: 1} - m_GridSize: {x: 20, y: 1, z: 20} - m_RotateWithAgent: 1 - m_DetectableTags: - - wall - - agent - - goal - - blockSmall - - blockLarge - - blockVeryLarge - m_ColliderMask: - serializedVersion: 2 - m_Bits: 1 - m_MaxColliderBufferSize: 500 - m_InitialColliderBufferSize: 16 - m_DebugColors: - - {r: 0, g: 0, b: 0, a: 0} - - {r: 0, g: 0.5176471, b: 1, a: 0} - - {r: 0.46666667, g: 0.7058824, b: 0.3529412, a: 0} - - {r: 1, g: 0.99607843, b: 0.9843137, a: 0} - - {r: 0.48235294, g: 0.48235294, b: 0.48235294, a: 0} - - {r: 0.35686275, g: 0.35686275, b: 0.35686275, a: 0} - m_GizmoYOffset: 0 - m_ShowGizmos: 0 - m_CompressionType: 1 - m_ObservationStacks: 1 - m_UseOneHotTag: 1 - m_CountColliders: 0 --- !u!1 &8191066182918326564 GameObject: m_ObjectHideFlags: 0 @@ -2214,47 +2173,6 @@ MonoBehaviour: UseRandomAgentPosition: 1 UseRandomBlockRotation: 1 UseRandomBlockPosition: 1 ---- !u!114 &4609315540733531199 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 8696048509000480032} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2a501962d056745d1a30e99146ee39fe, type: 3} - m_Name: - m_EditorClassIdentifier: - m_SensorName: GridSensor - m_CellScale: {x: 1, y: 0.01, z: 1} - m_GridSize: {x: 20, y: 1, z: 20} - m_RotateWithAgent: 1 - m_DetectableTags: - - wall - - agent - - goal - - blockSmall - - blockLarge - - blockVeryLarge - m_ColliderMask: - serializedVersion: 2 - m_Bits: 1 - 
m_MaxColliderBufferSize: 500 - m_InitialColliderBufferSize: 16 - m_DebugColors: - - {r: 0, g: 0, b: 0, a: 0} - - {r: 0, g: 0.5176471, b: 1, a: 0} - - {r: 0.46666667, g: 0.7058824, b: 0.3529412, a: 0} - - {r: 1, g: 0.99607843, b: 0.9843137, a: 0} - - {r: 0.48235294, g: 0.48235294, b: 0.48235294, a: 0} - - {r: 0.35686275, g: 0.35686275, b: 0.35686275, a: 0} - m_GizmoYOffset: 0 - m_ShowGizmos: 0 - m_CompressionType: 1 - m_ObservationStacks: 1 - m_UseOneHotTag: 1 - m_CountColliders: 0 --- !u!1 &8821353056066081524 GameObject: m_ObjectHideFlags: 0 @@ -2336,47 +2254,6 @@ MonoBehaviour: m_FallbackScreenDPI: 96 m_DefaultSpriteDPI: 96 m_DynamicPixelsPerUnit: 1 ---- !u!114 &6319243058783963332 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_CorrespondingSourceObject: {fileID: 0} - m_PrefabInstance: {fileID: 0} - m_PrefabAsset: {fileID: 0} - m_GameObject: {fileID: 9116780590443581137} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2a501962d056745d1a30e99146ee39fe, type: 3} - m_Name: - m_EditorClassIdentifier: - m_SensorName: GridSensor - m_CellScale: {x: 1, y: 0.01, z: 1} - m_GridSize: {x: 20, y: 1, z: 20} - m_RotateWithAgent: 1 - m_DetectableTags: - - wall - - agent - - goal - - blockSmall - - blockLarge - - blockVeryLarge - m_ColliderMask: - serializedVersion: 2 - m_Bits: 1 - m_MaxColliderBufferSize: 500 - m_InitialColliderBufferSize: 16 - m_DebugColors: - - {r: 0, g: 0, b: 0, a: 0} - - {r: 0, g: 0.5176471, b: 1, a: 0} - - {r: 0.46666667, g: 0.7058824, b: 0.3529412, a: 0} - - {r: 1, g: 0.99607843, b: 0.9843137, a: 0} - - {r: 0.48235294, g: 0.48235294, b: 0.48235294, a: 0} - - {r: 0.35686275, g: 0.35686275, b: 0.35686275, a: 0} - m_GizmoYOffset: 0 - m_ShowGizmos: 0 - m_CompressionType: 1 - m_ObservationStacks: 1 - m_UseOneHotTag: 1 - m_CountColliders: 0 --- !u!1001 &6067781793364901444 PrefabInstance: m_ObjectHideFlags: 0 @@ -2453,12 +2330,6 @@ PrefabInstance: m_RemovedComponents: - {fileID: 2709359580712052712, guid: ac01d0f42c5e1463e943632a60d99967, type: 3} m_SourcePrefab: {fileID: 100100000, guid: ac01d0f42c5e1463e943632a60d99967, type: 3} ---- !u!1 &8191066182862526894 stripped -GameObject: - m_CorrespondingSourceObject: {fileID: 2709359580712052714, guid: ac01d0f42c5e1463e943632a60d99967, - type: 3} - m_PrefabInstance: {fileID: 6067781793364901444} - m_PrefabAsset: {fileID: 0} --- !u!114 &8190299122290044756 stripped MonoBehaviour: m_CorrespondingSourceObject: {fileID: 2710286047221272848, guid: ac01d0f42c5e1463e943632a60d99967, @@ -2558,12 +2429,6 @@ PrefabInstance: m_RemovedComponents: - {fileID: 2709359580712052712, guid: ac01d0f42c5e1463e943632a60d99967, type: 3} m_SourcePrefab: {fileID: 100100000, guid: ac01d0f42c5e1463e943632a60d99967, type: 3} ---- !u!1 &9116780590443581137 stripped -GameObject: - m_CorrespondingSourceObject: {fileID: 2709359580712052714, guid: ac01d0f42c5e1463e943632a60d99967, - type: 3} - m_PrefabInstance: {fileID: 6565363751102736699} - m_PrefabAsset: {fileID: 0} --- !u!114 &9115291448867436587 stripped MonoBehaviour: m_CorrespondingSourceObject: {fileID: 2710286047221272848, guid: ac01d0f42c5e1463e943632a60d99967, @@ -2663,12 +2528,6 @@ PrefabInstance: m_RemovedComponents: - {fileID: 2709359580712052712, guid: ac01d0f42c5e1463e943632a60d99967, type: 3} m_SourcePrefab: {fileID: 100100000, guid: ac01d0f42c5e1463e943632a60d99967, type: 3} ---- !u!1 &8696048509000480032 stripped -GameObject: - m_CorrespondingSourceObject: {fileID: 2709359580712052714, guid: ac01d0f42c5e1463e943632a60d99967, - type: 3} - m_PrefabInstance: {fileID: 
6716844123244810954} - m_PrefabAsset: {fileID: 0} --- !u!114 &8695281997955662810 stripped MonoBehaviour: m_CorrespondingSourceObject: {fileID: 2710286047221272848, guid: ac01d0f42c5e1463e943632a60d99967, diff --git a/com.unity.ml-agents.extensions/Documentation~/Grid-Sensor.md b/com.unity.ml-agents.extensions/Documentation~/Grid-Sensor.md deleted file mode 100644 index d043da9516..0000000000 --- a/com.unity.ml-agents.extensions/Documentation~/Grid-Sensor.md +++ /dev/null @@ -1,230 +0,0 @@ -# Summary - -The Grid Sensor is an alternative method for collecting observations which combines the generality of data extraction from Raycasts with the image processing power of Convolutional Neural Networks. The Grid Sensor can be used to collect data in the general form of a "Width x Height x Channel" matrix which can be used for training agent policies or for data analysis. - - - -# Motivation - -In ML-Agents there are two main sensors for observing information that is "physically" around the agent. - -**Raycasts** - -Raycasts provide the agent the ability to see things along prespecified lines of sight, similar to LIDAR. The kind of data it can extract is open to the developer from things like: - -* The type of an object (enemy, npc, etc) -* The health of a unit -* the damage-per-second of a weapon on the ground - -Raycasts are simple to implement and provides enough information for most simple games. When few are used, they are also computationally lightweight. However, there are multiple limiting factors: - -* The rays need to be at the same height as the things the agent should observe. -* Objects can remain hidden by line of sight and if the knowledge of those objects is crucial to the success of the agent, then this limitation must be compensated for by the agents networks capacity (i.e., need a bigger brain with memory). -* The order of the raycasts (one raycast being to the left/right of another) is thrown away at the model level and must be learned by the agent which extends training time. Multiple raycasts exacerbates this issue. -* Typically, the length of the raycasts is limited because the agent need not know about objects that are at the other side of the level. Combined with few raycasts for computational efficiency, this means that an agent may not observe objects that fall between these rays and the issue becomes worse as the objects reduce in size. - -**Camera** - -The Camera provides the agent with either a grayscale or an RGB image of the game environment. In many cases, what we want to extract from a set of pixels is invariant to the location of those pixels in the image. It is this intuition that helps form the basis of Convolutional Neural Networks (CNNs) and established the literature of designing networks that take advantage of these relationships between pixels. Following this established literature of CNNs on image based data, the ML-Agent's Camera Sensor provides a means by which the agent can include high dimensional inputs (images) into its observation stream. -However the Camera Sensor has its own drawbacks as well. - -* It requires rendering the scene and thus is computationally slower than alternatives that do not use rendering. -* If the textures of the important objects in the game are updated, the agent needs to be retrained. -* The RGB of the camera only provides a maximum of three channels to the agent. - -These limitations provided the motivation towards the development of the Grid Sensor and Grid Observations as described below. 
- -# Contribution - -An image can be thought of as a matrix of a predefined width (W) and a height (H) and each pixel can be thought of as simply an array of length 3 (in the case of RGB), `[Red, Green, Blue]` holding the different channel information of the color (channel) intensities at that pixel location. Thus an image is just a 3 dimensional matrix of size WxHx3. A Grid Observation can be thought of as a generalization of this setup where in place of a pixel there is a "cell" which is an array of length N representing different channel intensities at that cell position. From a Convolutional Neural Network point of view, the introduction of multiple channels in an "image" isn't a new concept. One such example is using an RGB-Depth image which is used in several robotics applications. The distinction of Grid Observations is what the data within the channels represents. Instead of limiting the channels to color intensities, the channels within a cell of a Grid Observation generalize to any data that can be represented by a single number (float or int). - -Before jumping into the details of the Grid Sensor, an important thing to note is the agent performance and qualitatively different behavior over raycasts. Unity MLAgent's comes with a suite of example environments. One in particular, the [Food Collector](https://github.com/Unity-Technologies/ml-agents/tree/release_16_docs/docs/Learning-Environment-Examples.md#food-collector), has been the focus of the Grid Sensor development. - -The Food Collector environment can be described as: -* Set-up: A multi-agent environment where agents compete to collect food. -* Goal: The agents must learn to collect as many green food spheres as possible while avoiding red spheres. -* Agents: The environment contains 5 agents with same Behavior Parameters. - -When applying the Grid Sensor to this environment, in place of the Raycast Vector Sensor or the Camera Sensor, a Mean Reward of 40-50 is observed. This performance is on par with what is seen by agents trained with RayCasts but the side-by-side comparison of trained agents, shows a qualitative difference in behavior. A deeper study and interpretation of the qualitative differences between agents trained with Raycasts and Vector Sensors verses Grid Sensors is left to future studies. - - - -## Overview - -There are three main phases to the observation process of the Grid Sensor: - -1. **Collection** - data is extracted from observed objects -2. **Encoding** - the extracted data is encoded into a grid observation -3. **Communication** - the grid observation is sent to python or used by a trained model - -These phases are described in the following sections. - -## Collection - -A Grid Sensor is the Grid Observation analog of a Unity Camera but with some notable differences. The sensor is made up of a grid of identical Box Colliders which designate the "cells" of the grid. The Grid Sensor also has a list of "detectable objects" in the form of Unity GameObject tags. When an object that is tagged as a detectable object is present within a cell's Box Collider, that cell is "activated" and a method on the Grid Sensor extracts data from said object and associates that data with the position of the activated cell. Thus the Grid Sensor is always orthographic: - - -geofx.com - -In practice it has been useful to center the Grid Sensor on the agent in such a way that it is equivalent to having a "top-down" orthographic view of the agent. 
- -Just like the Raycasts mentioned earlier, the Grid Sensor can extract any kind of data from a detected object, and just like the Camera, the Grid Sensor maintains the spacial relationship between nearby cells that allows one to take advantage of the computational properties of CNNs. Thus the Grid Sensor tries to take the best of both sensors and combines them to something that is more expressive. - -### Example of Grid Observations -A Grid Observation is best described using an example and a side by side comparison with the Raycasts and the Camera. - -Let's imagine a scenario where an agent is faced with two enemies and there are two "equipable" weapons somewhat behind the agent. It would be helpful for the agent to know the location and properties of both the enemies as well as the equippable items. For simplicity, let's assume enemies represent their health as a percentage (0-100%). Also assume that enemies and weapons are the only two kinds of objects that the agent would see in the entire game. - - - -#### Raycasts -If a raycast hits an object, not only could we get the distance (normalized by the maximum raycast distance) we would be able to extract its type (enemy vs weapon) and any attribute associate with it (e.g. an enemy's health). - -There are many ways in which one could encode this information but one reasonable encoding is this: -``` -raycastData = [isWeapon, isEnemy, health, normalizedDistance] -``` - -For example, if the raycast hit nothing then this would be represented by `[0, 0, 0, 1]`. -If instead the raycast hit an enemy with 60% health that is 50% of the maximum raycast distance, the data would be represented by `[0, 1, .6, .5]`. - -The limitations of raycasts which were presented above are easy to visualize in the below image. The agent is unable to see where the weapons are and only sees one of the enemies. Typically in the ML-Agents examples, this situation is mitigated by including previous frames of data so that the agent observes changes through time. However, in more complex games, it is not difficult to imagine scenarios where an agent might miss important information using only Raycasts. - - - -#### Camera - -Instead, if we used a camera, the agent would be able to see around itself. It would be able to see both enemies and weapons (assuming its field of view was wide enough) and this could be processed by a CNN to encode this information. However, ignoring the obvious limitation that the game would have to be rendered, the agent would not have immediate access to the health value of the enemies. Perhaps textures are added to include "visible damage" to the enemies or there may be health bars above the enemies heads but both of these additions are subject to change, especially in a game that is in development. By using the camera only, it forces the agent to learn a different behavior as it is not able to access what would otherwise be accessible data. - - - -#### Grid Sensor - -The data extraction method of the Grid Sensor is as open-ended as using the Raycasts to collect data. The `GetObjectData` method on the Grid Sensor can be overridden to collect whatever information is deemed useful for the performance of the agent. By default, only the tag is used. 
- -```csharp - protected virtual float[] GetObjectData(GameObject currentColliderGo, float typeIndex, float normalizedDistance) -``` - -Following the same data extraction method presented in the section on raycasts, if a Grid Sensor was used instead of Raycasts or a Camera, then not only would the agent be able to extract the health value of the enemies but it would also be able to encode the relative positions of those objects as is done with Camera. Additionally, as the texture of the objects is not used, this data can be collected without rendering the scene. - -In our example, we can collect data in the form of [objectType, health] by overriding `GetObjectData` as the following: -```csharp - protected override float[] GetObjectData(GameObject currentColliderGo, float type_index, float normalized_distance) - { - float[] channelValues = new float[ChannelDepth.Length]; // ChannelDepth.Length = 2 in this example - channelValues[0] = type_index; // this is the observation collected in default implementation - if (currentColliderGo.tag == "enemy") - { - var enemy = currentColliderGo.GetComponent(); - channelValues[1] = enemy.health; // the value may have to be normalized depends on the type of GridSensor encoding you use (see sections below) - } - return channelValues; - } -``` - - - -At the end of the Collection phase, each cell with an object inside of it has `GetObjectData` called and the returned values is then processed in the Encoding phase which is described in the next section. - -#### CountingGridSensor - -The CountingGridSensor builds on the GridSensor to perform the specific job of counting the number of object types that are based on the different detectable object tags. The encoding is meant to exploit a key feature of the GridSensor. In original GridSensor, only the closest detectable object, in relation to the agent, that lies within a cell is used for encoding the value for that cell. In the CountingGridSensor, the number of each type of object is recorded and then normalized according to a max count. - -An example of the CountingGridSensor can be found below. - - -## Encoding - -In order to support different ways of representing the data extracted from an object, multiple "depth types" were implemented. Each has pros and cons and, depending on the use-case of the Grid Sensor, one may be more beneficial than the others. - -The stored data that is extracted during the *Collection* phase may come from different sources, and thus be of a different nature. For instance, going back to the Enemy/Weapon example in the previous section, an enemy's health is continuous whereas the object type (enemy or weapon) is categorical data. This distinction is important as categorical data requires a different encoding mechanism than continuous data. - -The GridSensor handles this distinction with two user defined properties that define how this data is to be encoded: - -* DepthType - Enum signifying the encoding mode: Channel, ChannelHot -* ChannelDepth - `int[]` describing the range of each data and is used differently with different DepthType - -How categorical and continuous data is treated is different between the different DepthTypes as will be explored in the sections below. The sections will use an on-going example similar to the example mentioned earlier where, within a cell, the sensor observes: `an enemy with 60% health`. Thus the cell contains two kinds of data: categorical data (object type) and the continuous data (health). 
Additionally, the order of the observed tags is important as it allows one to encode the tag of the observed object by its index within the list of observed tags. Note that in the example, the observed tags is defined as ["weapon", "enemy"]. - -### Channel Based - -The Channel Based Grid Observations is perhaps the simplest in terms of usability and similarity with other machine learning applications. Each grid is of size WxHxC where C is the number of channels. To distinguish between categorical and continuous data, one would use the ChannelDepth array to signify the ranges that the values in the `channelValues` array could take. If one sets ChannelDepth[i] to be 1, it is assumed that the value of `channelValues[i]` is already normalized. Else ChannelDepth[i] represents the total number of possible values that `channelValues[i]` can take. - -For continuous data, you should specify `ChannelDepth[i]` to 1 and the collected data should be already normalized by its min/max range. For discrete data, you should specify `ChannelDepth[i]` to be the total number of possible values, and the collected data should be an integer value within range of `ChannelDepth[i]`. - -Using the example described earlier, if one was using Channel Based Grid Observations, they would have a ChannelDepth = {2, 1} to describe that there are two possible values for the first channel (ObjectType) and the 1 represents that the second channel (EnemyHealth) is continuous and should be already normalized. - -As the "enemy" is in the second position of the observed tags, its value can be normalized by: -For ObjectType, "weapon", "enemy" will be represented respectively as: -``` -weapon = DetectableObjects.IndexOfTag("weapon")/ChannelDepth[0] = 1/2 = 0.5; -enemy = DetectableObjects.IndexOfTag("enemy")/ChannelDepth[0] = 2/2 = 1; -``` - -By using this formula, if there wasn't an object within the cell then the value would be 0. - -As the ChannelDepth for the second channel is defined as 1, the collected health value (60% = 0.6) can be encoded directly. Thus the encoded data at this cell is: -`[1, .6]`. If the health in the game is not represented in a normalized form, for example if the health is represented in an integer ranging from -100 to 100, you'll need to manully nomalize it during collection. That is, If you get value 50, you need to normalize it by `50/(100- (-100))=0.25` and collect 0.25 instead of 50. - -At the end of the Encoding phase, the resulting Grid Observation would be a WxHx2 matrix. - -### Channel Hot - -The Channel Hot DepthType generalizes the classic OneHot encoding to differentiate combinations of different data. Rather than normalizing the data like in the Channel Based section, each element of `channelValues` is represented by an encoding based on the ChannelDepth. If ChannelDepth[i] = 1, then this represents that `channelValues[i]` is already normalized (between 0-1) and will be used directly within the encoding which is same as with Channel Based. However if ChannelDepth[i] is an integer greater than 1, then the value in `channelValues[i]` will be converted into a OneHot encoding based on the following: - -``` -float[] arr = new float[ChannelDepth[i] + 1]; -int index = (int) channelValues[i] + 1; -arr[index] = 1; -return arr; -``` - -The `+ 1` allows the first index of `arr` to be reserved for encoding "empty". - -The encoding of each channel is then concatenated together. Clearly using this setup allows the developer to be able to encode values using the classic OneHot encoding. 
Below are some different variations of the ChannelDepth which create different encodings of the example: - -##### ChannelDepth = {3, 1} -The first element, 3, signifies that there are three possibilities for the first channel and as the "enemy" is 2nd in the detected objects list, the "enemy" in the example is encoded as `[0, 0, 1]` where the first index represents "no object". The second element, 1, signifies that the health is already normalized and, following the table, is used directly. The resulting encoding is thus: -``` -[0, 0, 1, 0.6] -``` - -##### ChannelDepth = {3, 5} - -Like in the previous example, the "enemy" in the example is encoded as `[0, 0, 1]`. For the "health" however, the 5 signifies that the health should be represented by a OneHot encoding of 5 possible values, and in this case that encoding is `round(.6*5) = round(3) = 3 => [0, 0, 0, 1, 0]`. - -This encoding would then be concatenated together with the "enemy" encoding resulting in: -``` -enemy encoding => [0, 0, 1] -health encoding => [0, 0, 0, 1, 0] -final encoding => [0, 0, 1, 0, 0, 0, 1, 0] -``` - -The table below describes how other values of health would be mapped to OneHot encoding representations: - -| Range | OneHot Encoding | -|------------------|-----------------| -| health = 0 | [1, 0, 0, 0, 0] | -| 0 < health < .3 | [0, 1, 0, 0, 0] | -| .3 < health < .5 | [0, 0, 1, 0, 0] | -| .5 < health < .7 | [0, 0, 0, 1, 0] | -| .7 < health <= 1 | [0, 0, 0, 0, 1] | - - -##### ChannelDepth = {1, 1} -This setting of ChannelDepth would throw an error as there is not enough information to encode the categorical data of the object type. - - -### CountingGridSensor - -As mentioned above, the CountingGridSensor inherits from the GridSensor for the sole purpose of counting the different objects that lay within a cell. In order to normalize the counts so that the grid can be properly encoded as PNG, the ChannelDepth is used to represent the "maximum count" of each type. For the working example, if the ChannelDepth is set as {50, 10}, which represents that the maximum count for objects with the "weapon" and "enemy" tag is 50 and 10, respectively, then the resulting data would be: -``` -encoding = [0 weapons/ 50 weapons, 1 enemy / 10 enemies] = [0, .1] -``` - -## Communication - -At the end of the Encoding phase, all the Grid Observations will be sent to either the python side for training or to be used by a trained model within Unity. Since the data format is similar to images collected by Camera Sensors, Grid Observations also have the CompressionType option to specify whether to send the data directly or send in PNG compressed form for better communication efficiency. - -Once the bytes are sent to Python, they are then decoded and provided as a tensor of the correct shape. diff --git a/com.unity.ml-agents.extensions/Documentation~/com.unity.ml-agents.extensions.md b/com.unity.ml-agents.extensions/Documentation~/com.unity.ml-agents.extensions.md index 98914d4b36..326dadfd6c 100644 --- a/com.unity.ml-agents.extensions/Documentation~/com.unity.ml-agents.extensions.md +++ b/com.unity.ml-agents.extensions/Documentation~/com.unity.ml-agents.extensions.md @@ -19,7 +19,6 @@ The following table describes the package folder structure: | _Tests_ | Contains the unit tests for the package. 
| The Runtime directory currently contains these features: - * [Grid-based sensor](Grid-Sensor.md) * Physics-based sensors * [Input System Package Integration](InputActuatorComponent.md) diff --git a/com.unity.ml-agents.extensions/Documentation~/images/gridobs-vs-vectorobs.gif b/com.unity.ml-agents.extensions/Documentation~/images/gridobs-vs-vectorobs.gif deleted file mode 100644 index 42984682ce..0000000000 Binary files a/com.unity.ml-agents.extensions/Documentation~/images/gridobs-vs-vectorobs.gif and /dev/null differ diff --git a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-debug.png b/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-debug.png deleted file mode 100644 index edd5200ac6..0000000000 Binary files a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-debug.png and /dev/null differ diff --git a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-camera.png b/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-camera.png deleted file mode 100644 index c1b5e8e456..0000000000 Binary files a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-camera.png and /dev/null differ diff --git a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-gridsensor.png b/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-gridsensor.png deleted file mode 100644 index 4946b8b79a..0000000000 Binary files a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-gridsensor.png and /dev/null differ diff --git a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-raycast.png b/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-raycast.png deleted file mode 100644 index ee7de91fa6..0000000000 Binary files a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example-raycast.png and /dev/null differ diff --git a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example.png b/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example.png deleted file mode 100644 index fb31d80c8e..0000000000 Binary files a/com.unity.ml-agents.extensions/Documentation~/images/gridsensor-example.png and /dev/null differ diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/CountingGridSensor.cs b/com.unity.ml-agents.extensions/Runtime/Sensors/CountingGridSensor.cs index f9f83a1c11..f7a212be1e 100644 --- a/com.unity.ml-agents.extensions/Runtime/Sensors/CountingGridSensor.cs +++ b/com.unity.ml-agents.extensions/Runtime/Sensors/CountingGridSensor.cs @@ -13,16 +13,16 @@ public class CountingGridSensor : GridSensorBase /// /// The sensor name /// The scale of each cell in the grid - /// Number of cells on each side of the grid + /// Number of cells on each side of the grid /// Tags to be detected by the sensor /// Compression type public CountingGridSensor( string name, Vector3 cellScale, - Vector3Int gridNum, + Vector3Int gridSize, string[] detectableTags, SensorCompressionType compression - ) : base(name, cellScale, gridNum, detectableTags, compression) + ) : base(name, cellScale, gridSize, detectableTags, compression) { CompressionType = SensorCompressionType.None; } diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors.meta b/com.unity.ml-agents.extensions/Tests/Editor/GridSensors.meta deleted file mode 100644 index 634227b825..0000000000 --- a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 
4690c621901ab49f2a557fa255c46622 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/com.unity.ml-agents.extensions/Tests/Runtime/Sensors/CountingGridSensorTests.cs b/com.unity.ml-agents.extensions/Tests/Runtime/Sensors/CountingGridSensorTests.cs new file mode 100644 index 0000000000..b4acb43609 --- /dev/null +++ b/com.unity.ml-agents.extensions/Tests/Runtime/Sensors/CountingGridSensorTests.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections; +using System.Linq; +using NUnit.Framework; +using UnityEngine; +using UnityEngine.TestTools; +using Unity.MLAgents.Sensors; +using Unity.MLAgents.Extensions.Sensors; +using Object = UnityEngine.Object; + +namespace Unity.MLAgents.Extensions.Tests.Sensors +{ + public class CountingGridSensorTests + { + GameObject testGo; + GameObject boxGo; + TestCountingGridSensorComponent gridSensorComponent; + + // Use built-in tags + const string k_Tag1 = "Player"; + const string k_Tag2 = "Respawn"; + + [UnitySetUp] + public IEnumerator SetupScene() + { + testGo = new GameObject("test"); + testGo.transform.position = Vector3.zero; + gridSensorComponent = testGo.AddComponent(); + + boxGo = new GameObject("block"); + boxGo.tag = k_Tag1; + boxGo.transform.position = new Vector3(3f, 0f, 3f); + boxGo.AddComponent(); + + yield return null; + } + + [TearDown] + public void ClearScene() + { + Object.DestroyImmediate(boxGo); + Object.DestroyImmediate(testGo); + } + + public class TestCountingGridSensorComponent : GridSensorComponent + { + public void SetParameters(string[] detectableTags) + { + DetectableTags = detectableTags; + CellScale = new Vector3(1, 0.01f, 1); + GridSize = new Vector3Int(10, 1, 10); + ColliderMask = LayerMask.GetMask("Default"); + RotateWithAgent = false; + CompressionType = SensorCompressionType.None; + } + + protected override GridSensorBase[] GetGridSensors() + { + return new GridSensorBase[] { + new CountingGridSensor( + "TestSensor", + CellScale, + GridSize, + DetectableTags, + CompressionType) }; + } + } + + // Copied from GridSensorTests in main package + public static float[][] DuplicateArray(float[] array, int numCopies) + { + float[][] duplicated = new float[numCopies][]; + for (int i = 0; i < numCopies; i++) + { + duplicated[i] = array; + } + return duplicated; + } + + // Copied from GridSensorTests in main package + public static void AssertSubarraysAtIndex(float[] total, int[] indicies, float[][] expectedArrays, float[] expectedDefaultArray) + { + int totalIndex = 0; + int subIndex = 0; + int subarrayIndex = 0; + int lenOfData = expectedDefaultArray.Length; + int numArrays = total.Length / lenOfData; + for (int i = 0; i < numArrays; i++) + { + totalIndex = i * lenOfData; + + if (indicies.Contains(i)) + { + subarrayIndex = Array.IndexOf(indicies, i); + for (subIndex = 0; subIndex < lenOfData; subIndex++) + { + Assert.AreEqual(expectedArrays[subarrayIndex][subIndex], total[totalIndex], + "Expected " + expectedArrays[subarrayIndex][subIndex] + " at subarray index " + totalIndex + ", index = " + subIndex + " but was " + total[totalIndex]); + totalIndex++; + } + } + else + { + for (subIndex = 0; subIndex < lenOfData; subIndex++) + { + Assert.AreEqual(expectedDefaultArray[subIndex], total[totalIndex], + "Expected default value " + expectedDefaultArray[subIndex] + " at subarray index " + totalIndex + ", index = " + subIndex + " but was " + total[totalIndex]); + totalIndex++; + } + } + } + } + + [Test] + public void TestCountingSensor() + { + string[] tags = { 
k_Tag1, k_Tag2 }; gridSensorComponent.SetParameters(tags); var gridSensor = (CountingGridSensor)gridSensorComponent.CreateSensors()[0]; Assert.AreEqual(gridSensor.PerceptionBuffer.Length, 10 * 10 * 2); gridSensor.Update(); int[] subarrayIndicies = new int[] { 77, 78, 87, 88 }; float[][] expectedSubarrays = DuplicateArray(new float[] { 1, 0 }, 4); float[] expectedDefault = new float[] { 0, 0 }; AssertSubarraysAtIndex(gridSensor.PerceptionBuffer, subarrayIndicies, expectedSubarrays, expectedDefault); var boxGo2 = new GameObject("block"); boxGo2.tag = k_Tag1; boxGo2.transform.position = new Vector3(3.1f, 0f, 3f); boxGo2.AddComponent<BoxCollider>(); gridSensor.Update(); subarrayIndicies = new int[] { 77, 78, 87, 88 }; expectedSubarrays = DuplicateArray(new float[] { 2, 0 }, 4); expectedDefault = new float[] { 0, 0 }; AssertSubarraysAtIndex(gridSensor.PerceptionBuffer, subarrayIndicies, expectedSubarrays, expectedDefault); Object.DestroyImmediate(boxGo2); } } } diff --git a/com.unity.ml-agents.extensions/Tests/Runtime/Sensors/CountingGridSensorTests.cs.meta b/com.unity.ml-agents.extensions/Tests/Runtime/Sensors/CountingGridSensorTests.cs.meta new file mode 100644 index 0000000000..f1a711faf8 --- /dev/null +++ b/com.unity.ml-agents.extensions/Tests/Runtime/Sensors/CountingGridSensorTests.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 2a1d17f91519347e0a8692e2816b7c8b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/com.unity.ml-agents/CHANGELOG.md b/com.unity.ml-agents/CHANGELOG.md index f75162e9d5..9889188083 100755 --- a/com.unity.ml-agents/CHANGELOG.md +++ b/com.unity.ml-agents/CHANGELOG.md @@ -23,13 +23,16 @@ and `IDimensionPropertiesSensor` interfaces were removed. (#5127) - `ISensor.GetCompressionType()` was removed, and `GetCompressionSpec()` was added. The `ISparseChannelSensor` interface was removed. (#5164) - The abstract method `SensorComponent.GetObservationShape()` was no longer being called, so it has been removed. (#5172) -- `SensorComponent.CreateSensor()` was replaced with `SensorComponent.CreateSensor()`, which returns an `ISensor[]`. (#5181) +- `SensorComponent.CreateSensor()` was replaced with `SensorComponent.CreateSensors()`, which returns an `ISensor[]`. (#5181) - `Match3Sensor` was refactored to produce cell and special type observations separately, and `Match3SensorComponent` now produces two `Match3Sensor`s (unless there are no special types). Previously trained models will have different observation sizes and will need to be retrained. (#5181) - The `AbstractBoard` class for integration with Match-3 games was changed to make it easier to support boards with different sizes using the same model. For a summary of the interface changes, please see the Migration Guide. (##5189) - Updated the Barracuda package to version `1.3.3-preview`(#5236) +- `GridSensor` has been refactored and moved to the main package, with changes to both sensor interfaces and behaviors. +Existing GridSensors created by the extensions package will not work in the newer version. Previously trained models will +need to be retrained. Please see the Migration Guide for more details.
(#5256) ### Minor Changes #### com.unity.ml-agents / com.unity.ml-agents.extensions (C#) diff --git a/com.unity.ml-agents.extensions/Editor/GridSensorComponentEditor.cs b/com.unity.ml-agents/Editor/GridSensorComponentEditor.cs similarity index 85% rename from com.unity.ml-agents.extensions/Editor/GridSensorComponentEditor.cs rename to com.unity.ml-agents/Editor/GridSensorComponentEditor.cs index 9c034af00d..a16b1a3d19 100644 --- a/com.unity.ml-agents.extensions/Editor/GridSensorComponentEditor.cs +++ b/com.unity.ml-agents/Editor/GridSensorComponentEditor.cs @@ -1,9 +1,8 @@ using UnityEditor; using UnityEngine; -using Unity.MLAgents.Editor; -using Unity.MLAgents.Extensions.Sensors; +using Unity.MLAgents.Sensors; -namespace Unity.MLAgents.Extensions.Editor +namespace Unity.MLAgents.Editor { [CustomEditor(typeof(GridSensorComponent))] [CanEditMultipleObjects] @@ -11,6 +10,12 @@ internal class GridSensorComponentEditor : UnityEditor.Editor { public override void OnInspectorGUI() { +#if !MLA_UNITY_PHYSICS_MODULE + EditorGUILayout.HelpBox("The Physics Module is not currently present. " + + "Please add it to your project in order to use the GridSensor APIs in the " + + $"{nameof(GridSensorComponent)}", MessageType.Warning); +#endif + var so = serializedObject; so.Update(); @@ -25,11 +30,11 @@ public override void OnInspectorGUI() EditorGUILayout.LabelField("Grid Settings", EditorStyles.boldLabel); EditorGUILayout.PropertyField(so.FindProperty(nameof(GridSensorComponent.m_CellScale)), true); - // We only supports 2D GridSensor now so display gridNumSide as Vector2 + // We only supports 2D GridSensor now so lock gridSize.y to 1 var gridSize = so.FindProperty(nameof(GridSensorComponent.m_GridSize)); - var gridSize2d = new Vector2Int(gridSize.vector3IntValue.x, gridSize.vector3IntValue.z); - var newGridSize = EditorGUILayout.Vector2IntField("Grid Size", gridSize2d); - gridSize.vector3IntValue = new Vector3Int(newGridSize.x, 1, newGridSize.y); + var gridSize2d = new Vector3Int(gridSize.vector3IntValue.x, 1, gridSize.vector3IntValue.z); + var newGridSize = EditorGUILayout.Vector3IntField("Grid Size", gridSize2d); + gridSize.vector3IntValue = new Vector3Int(newGridSize.x, 1, newGridSize.z); } EditorGUI.EndDisabledGroup(); EditorGUILayout.PropertyField(so.FindProperty(nameof(GridSensorComponent.m_RotateWithAgent)), true); @@ -50,10 +55,6 @@ public override void OnInspectorGUI() EditorGUILayout.PropertyField(objectTag, new GUIContent("Tag " + i), true); } EditorGUI.indentLevel--; - - EditorGUILayout.LabelField("Observation Settings", EditorStyles.boldLabel); - EditorGUILayout.PropertyField(so.FindProperty(nameof(GridSensorComponent.m_UseOneHotTag)), new GUIContent("One-Hot Tag Index"), true); - EditorGUILayout.PropertyField(so.FindProperty(nameof(GridSensorComponent.m_CountColliders)), new GUIContent("Detectable Tag Count"), true); } EditorGUI.EndDisabledGroup(); EditorGUILayout.PropertyField(so.FindProperty(nameof(GridSensorComponent.m_ColliderMask)), true); @@ -80,6 +81,7 @@ public override void OnInspectorGUI() { debugColors.arraySize = detectableObjectSize; } + EditorGUILayout.LabelField("Debug Colors"); EditorGUI.indentLevel++; for (var i = 0; i < debugColors.arraySize; i++) { diff --git a/com.unity.ml-agents.extensions/Editor/GridSensorComponentEditor.cs.meta b/com.unity.ml-agents/Editor/GridSensorComponentEditor.cs.meta similarity index 83% rename from com.unity.ml-agents.extensions/Editor/GridSensorComponentEditor.cs.meta rename to com.unity.ml-agents/Editor/GridSensorComponentEditor.cs.meta index 
6d28aa3ac5..c27459abce 100644 --- a/com.unity.ml-agents.extensions/Editor/GridSensorComponentEditor.cs.meta +++ b/com.unity.ml-agents/Editor/GridSensorComponentEditor.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 62dc58d0ddf584affa1f269e9c5791c2 +guid: 584686b36fcb2435c8be47d70c332ed0 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/BoxOverlapChecker.cs b/com.unity.ml-agents/Runtime/Sensors/BoxOverlapChecker.cs similarity index 97% rename from com.unity.ml-agents.extensions/Runtime/Sensors/BoxOverlapChecker.cs rename to com.unity.ml-agents/Runtime/Sensors/BoxOverlapChecker.cs index ea49b9f742..7a6f0f5310 100644 --- a/com.unity.ml-agents.extensions/Runtime/Sensors/BoxOverlapChecker.cs +++ b/com.unity.ml-agents/Runtime/Sensors/BoxOverlapChecker.cs @@ -1,9 +1,7 @@ using System; -using System.Runtime.CompilerServices; using UnityEngine; -[assembly: InternalsVisibleTo("Unity.ML-Agents.Extensions.EditorTests")] -namespace Unity.MLAgents.Extensions.Sensors +namespace Unity.MLAgents.Sensors { internal class BoxOverlapChecker { @@ -20,11 +18,14 @@ internal class BoxOverlapChecker Vector3 m_HalfCellScale; Vector3 m_CellCenterOffset; Vector3[] m_CellLocalPositions; + +#if MLA_UNITY_PHYSICS_MODULE Collider[] m_ColliderBuffer; public event Action GridOverlapDetectedAll; public event Action GridOverlapDetectedClosest; public event Action GridOverlapDetectedDebug; +#endif public BoxOverlapChecker( Vector3 cellScale, @@ -48,7 +49,9 @@ public BoxOverlapChecker( m_NumCells = gridSize.x * gridSize.z; m_HalfCellScale = new Vector3(cellScale.x / 2f, cellScale.y, cellScale.z / 2f); m_CellCenterOffset = new Vector3((gridSize.x - 1f) / 2, 0, (gridSize.z - 1f) / 2); +#if MLA_UNITY_PHYSICS_MODULE m_ColliderBuffer = new Collider[Math.Min(m_MaxColliderBufferSize, m_InitialColliderBufferSize)]; +#endif InitCellLocalPositions(); } @@ -105,41 +108,13 @@ internal Quaternion GetGridRotation() return m_RotateWithAgent ? m_RootReference.transform.rotation : Quaternion.identity; } - /// - /// This method attempts to perform the Physics.OverlapBoxNonAlloc and will double the size of the Collider buffer - /// if the number of Colliders in the buffer after the call is equal to the length of the buffer. - /// - /// - /// - /// - /// - int BufferResizingOverlapBoxNonAlloc(Vector3 cellCenter, Vector3 halfCellScale, Quaternion rotation) - { - int numFound; - // Since we can only get a fixed number of results, requery - // until we're sure we can hold them all (or until we hit the max size). - while (true) - { - numFound = Physics.OverlapBoxNonAlloc(cellCenter, halfCellScale, m_ColliderBuffer, rotation, m_ColliderMask); - if (numFound == m_ColliderBuffer.Length && m_ColliderBuffer.Length < m_MaxColliderBufferSize) - { - m_ColliderBuffer = new Collider[Math.Min(m_MaxColliderBufferSize, m_ColliderBuffer.Length * 2)]; - m_InitialColliderBufferSize = m_ColliderBuffer.Length; - } - else - { - break; - } - } - return numFound; - } - /// /// Perceive the latest grid status. Call OverlapBoxNonAlloc once to detect colliders. /// Then parse the collider arrays according to all available gridSensor delegates. 
/// internal void Update() { +#if MLA_UNITY_PHYSICS_MODULE for (var cellIndex = 0; cellIndex < m_NumCells; cellIndex++) { var cellCenter = GetCellGlobalPosition(cellIndex); @@ -154,6 +129,7 @@ internal void Update() ParseCollidersClosest(m_ColliderBuffer, numFound, cellIndex, cellCenter, GridOverlapDetectedClosest); } } +#endif } /// @@ -161,6 +137,7 @@ internal void Update() /// internal void UpdateGizmo() { +#if MLA_UNITY_PHYSICS_MODULE for (var cellIndex = 0; cellIndex < m_NumCells; cellIndex++) { var cellCenter = GetCellGlobalPosition(cellIndex); @@ -168,6 +145,37 @@ internal void UpdateGizmo() ParseCollidersClosest(m_ColliderBuffer, numFound, cellIndex, cellCenter, GridOverlapDetectedDebug); } +#endif + } + +#if MLA_UNITY_PHYSICS_MODULE + /// + /// This method attempts to perform the Physics.OverlapBoxNonAlloc and will double the size of the Collider buffer + /// if the number of Colliders in the buffer after the call is equal to the length of the buffer. + /// + /// + /// + /// + /// + int BufferResizingOverlapBoxNonAlloc(Vector3 cellCenter, Vector3 halfCellScale, Quaternion rotation) + { + int numFound; + // Since we can only get a fixed number of results, requery + // until we're sure we can hold them all (or until we hit the max size). + while (true) + { + numFound = Physics.OverlapBoxNonAlloc(cellCenter, halfCellScale, m_ColliderBuffer, rotation, m_ColliderMask); + if (numFound == m_ColliderBuffer.Length && m_ColliderBuffer.Length < m_MaxColliderBufferSize) + { + m_ColliderBuffer = new Collider[Math.Min(m_MaxColliderBufferSize, m_ColliderBuffer.Length * 2)]; + m_InitialColliderBufferSize = m_ColliderBuffer.Length; + } + else + { + break; + } + } + return numFound; } /// @@ -233,9 +241,11 @@ void ParseCollidersAll(Collider[] foundColliders, int numFound, int cellIndex, V } } } +#endif internal void RegisterSensor(GridSensorBase sensor) { +#if MLA_UNITY_PHYSICS_MODULE if (sensor.GetProcessCollidersMethod() == ProcessCollidersMethod.ProcessAllColliders) { GridOverlapDetectedAll += sensor.ProcessDetectedObject; @@ -244,11 +254,14 @@ internal void RegisterSensor(GridSensorBase sensor) { GridOverlapDetectedClosest += sensor.ProcessDetectedObject; } +#endif } internal void RegisterDebugSensor(GridSensorBase debugSensor) { +#if MLA_UNITY_PHYSICS_MODULE GridOverlapDetectedDebug += debugSensor.ProcessDetectedObject; +#endif } } } diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/BoxOverlapChecker.cs.meta b/com.unity.ml-agents/Runtime/Sensors/BoxOverlapChecker.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Runtime/Sensors/BoxOverlapChecker.cs.meta rename to com.unity.ml-agents/Runtime/Sensors/BoxOverlapChecker.cs.meta diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorBase.cs b/com.unity.ml-agents/Runtime/Sensors/GridSensorBase.cs similarity index 93% rename from com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorBase.cs rename to com.unity.ml-agents/Runtime/Sensors/GridSensorBase.cs index ca4fd09252..74398ddcfd 100644 --- a/com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorBase.cs +++ b/com.unity.ml-agents/Runtime/Sensors/GridSensorBase.cs @@ -1,13 +1,9 @@ using System; using System.Collections.Generic; -using System.Runtime.CompilerServices; using UnityEngine; -using Unity.MLAgents.Sensors; using UnityEngine.Profiling; -using Object = UnityEngine.Object; -[assembly: InternalsVisibleTo("Unity.ML-Agents.Extensions.EditorTests")] -namespace Unity.MLAgents.Extensions.Sensors +namespace Unity.MLAgents.Sensors { /// /// The way 
the GridSensor process detected colliders in a cell. @@ -55,20 +51,20 @@ public class GridSensorBase : ISensor, IBuiltInSensor, IDisposable /// /// The sensor name /// The scale of each cell in the grid - /// Number of cells on each side of the grid + /// Number of cells on each side of the grid /// Tags to be detected by the sensor /// Compression type public GridSensorBase( string name, Vector3 cellScale, - Vector3Int gridNum, + Vector3Int gridSize, string[] detectableTags, SensorCompressionType compression ) { m_Name = name; m_CellScale = cellScale; - m_GridSize = gridNum; + m_GridSize = gridSize; m_DetectableTags = detectableTags; CompressionType = compression; @@ -344,23 +340,9 @@ public void Dispose() { if (!ReferenceEquals(null, m_PerceptionTexture)) { - DestroyTexture(m_PerceptionTexture); + Utilities.DestroyTexture(m_PerceptionTexture); m_PerceptionTexture = null; } } - - static void DestroyTexture(Texture2D texture) - { - if (Application.isEditor) - { - // Edit Mode tests complain if we use Destroy() - // TODO move to extension methods for UnityEngine.Object? - Object.DestroyImmediate(texture); - } - else - { - Object.Destroy(texture); - } - } } } diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorBase.cs.meta b/com.unity.ml-agents/Runtime/Sensors/GridSensorBase.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorBase.cs.meta rename to com.unity.ml-agents/Runtime/Sensors/GridSensorBase.cs.meta diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorComponent.cs b/com.unity.ml-agents/Runtime/Sensors/GridSensorComponent.cs similarity index 87% rename from com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorComponent.cs rename to com.unity.ml-agents/Runtime/Sensors/GridSensorComponent.cs index 56c74ad8f6..034809729b 100644 --- a/com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorComponent.cs +++ b/com.unity.ml-agents/Runtime/Sensors/GridSensorComponent.cs @@ -1,10 +1,7 @@ using System.Collections.Generic; -using System.Runtime.CompilerServices; using UnityEngine; -using Unity.MLAgents.Sensors; -[assembly: InternalsVisibleTo("Unity.ML-Agents.Extensions.EditorTests")] -namespace Unity.MLAgents.Extensions.Sensors +namespace Unity.MLAgents.Sensors { /// /// A SensorComponent that creates a . @@ -18,7 +15,7 @@ public class GridSensorComponent : SensorComponent internal BoxOverlapChecker m_BoxOverlapChecker; [HideInInspector, SerializeField] - internal string m_SensorName = "GridSensor"; + protected internal string m_SensorName = "GridSensor"; /// /// Name of the generated object. /// Note that changing this at runtime does not affect how the Agent sorts the sensors. @@ -184,30 +181,6 @@ public int ObservationStacks set { m_ObservationStacks = value; } } - [HideInInspector, SerializeField] - internal bool m_UseOneHotTag = true; - /// - /// Whether to use one-hot representation of detected tag as observation. - /// Note that changing this after the sensor is created has no effect. - /// - public bool UseOneHotTag - { - get { return m_UseOneHotTag; } - set { m_UseOneHotTag = value; } - } - - [HideInInspector, SerializeField] - internal bool m_CountColliders = false; - /// - /// Whether to use the number of count for each detectable tag as observation. - /// Note that changing this after the sensor is created has no effect. 
- /// - public bool CountColliders - { - get { return m_CountColliders; } - set { m_CountColliders = value; } - } - /// public override ISensor[] CreateSensors() { @@ -260,16 +233,8 @@ public override ISensor[] CreateSensors() protected virtual GridSensorBase[] GetGridSensors() { List sensorList = new List(); - if (m_UseOneHotTag) - { - var sensor = new OneHotGridSensor(m_SensorName + "-OneHot", m_CellScale, m_GridSize, m_DetectableTags, m_CompressionType); - sensorList.Add(sensor); - } - if (m_CountColliders) - { - var sensor = new CountingGridSensor(m_SensorName + "-Counting", m_CellScale, m_GridSize, m_DetectableTags, m_CompressionType); - sensorList.Add(sensor); - } + var sensor = new OneHotGridSensor(m_SensorName + "-OneHot", m_CellScale, m_GridSize, m_DetectableTags, m_CompressionType); + sensorList.Add(sensor); return sensorList.ToArray(); } diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorComponent.cs.meta b/com.unity.ml-agents/Runtime/Sensors/GridSensorComponent.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Runtime/Sensors/GridSensorComponent.cs.meta rename to com.unity.ml-agents/Runtime/Sensors/GridSensorComponent.cs.meta diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/OneHotGridSensor.cs b/com.unity.ml-agents/Runtime/Sensors/OneHotGridSensor.cs similarity index 88% rename from com.unity.ml-agents.extensions/Runtime/Sensors/OneHotGridSensor.cs rename to com.unity.ml-agents/Runtime/Sensors/OneHotGridSensor.cs index 459785eab7..648c702d80 100644 --- a/com.unity.ml-agents.extensions/Runtime/Sensors/OneHotGridSensor.cs +++ b/com.unity.ml-agents/Runtime/Sensors/OneHotGridSensor.cs @@ -1,7 +1,6 @@ using UnityEngine; -using Unity.MLAgents.Sensors; -namespace Unity.MLAgents.Extensions.Sensors +namespace Unity.MLAgents.Sensors { /// /// Grid-based sensor with one-hot observations. 
@@ -13,16 +12,16 @@ public class OneHotGridSensor : GridSensorBase /// /// The sensor name /// The scale of each cell in the grid - /// Number of cells on each side of the grid + /// Number of cells on each side of the grid /// Tags to be detected by the sensor /// Compression type public OneHotGridSensor( string name, Vector3 cellScale, - Vector3Int gridNum, + Vector3Int gridSize, string[] detectableTags, SensorCompressionType compression - ) : base(name, cellScale, gridNum, detectableTags, compression) + ) : base(name, cellScale, gridSize, detectableTags, compression) { } diff --git a/com.unity.ml-agents.extensions/Runtime/Sensors/OneHotGridSensor.cs.meta b/com.unity.ml-agents/Runtime/Sensors/OneHotGridSensor.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Runtime/Sensors/OneHotGridSensor.cs.meta rename to com.unity.ml-agents/Runtime/Sensors/OneHotGridSensor.cs.meta diff --git a/com.unity.ml-agents/Runtime/Sensors/RayPerceptionSensor.cs b/com.unity.ml-agents/Runtime/Sensors/RayPerceptionSensor.cs index 319f675087..4380232cb2 100644 --- a/com.unity.ml-agents/Runtime/Sensors/RayPerceptionSensor.cs +++ b/com.unity.ml-agents/Runtime/Sensors/RayPerceptionSensor.cs @@ -319,7 +319,7 @@ public int Write(ObservationWriter writer) // For each ray, write the information to the observation buffer for (var rayIndex = 0; rayIndex < numRays; rayIndex++) { - m_RayPerceptionOutput.RayOutputs[rayIndex].ToFloatArray(numDetectableTags, rayIndex, m_Observations); + m_RayPerceptionOutput.RayOutputs?[rayIndex].ToFloatArray(numDetectableTags, rayIndex, m_Observations); } // Finally, add the observations to the ObservationWriter diff --git a/com.unity.ml-agents/Tests/Runtime/RuntimeAPITest.cs b/com.unity.ml-agents/Tests/Runtime/RuntimeAPITest.cs index 89c790c126..0c3b6312b4 100644 --- a/com.unity.ml-agents/Tests/Runtime/RuntimeAPITest.cs +++ b/com.unity.ml-agents/Tests/Runtime/RuntimeAPITest.cs @@ -75,7 +75,7 @@ public IEnumerator RuntimeApiTestWithEnumeratorPasses() // Can't actually create an Agent with InferenceOnly and no model, so change back behaviorParams.BehaviorType = BehaviorType.Default; -#if MLA_UNITY_PHSYICS_MODULE +#if MLA_UNITY_PHYSICS_MODULE var sensorComponent = gameObject.AddComponent(); sensorComponent.SensorName = "ray3d"; sensorComponent.DetectableTags = new List { "Player", "Respawn" }; @@ -104,7 +104,7 @@ public IEnumerator RuntimeApiTestWithEnumeratorPasses() decisionRequester.DecisionPeriod = 2; decisionRequester.TakeActionsBetweenDecisions = true; -#if MLA_UNITY_PHSYICS_MODULE +#if MLA_UNITY_PHYSICS_MODULE // Initialization should set up the sensors Assert.IsNotNull(sensorComponent.RaySensor); #endif diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/BoxOverlapCheckerTests.cs b/com.unity.ml-agents/Tests/Runtime/Sensor/BoxOverlapCheckerTests.cs similarity index 98% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/BoxOverlapCheckerTests.cs rename to com.unity.ml-agents/Tests/Runtime/Sensor/BoxOverlapCheckerTests.cs index a9f2e0b272..5f5651c0c5 100644 --- a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/BoxOverlapCheckerTests.cs +++ b/com.unity.ml-agents/Tests/Runtime/Sensor/BoxOverlapCheckerTests.cs @@ -1,10 +1,11 @@ +#if MLA_UNITY_PHYSICS_MODULE using System.Collections.Generic; using System.Reflection; using NUnit.Framework; using UnityEngine; -using Unity.MLAgents.Extensions.Sensors; +using Unity.MLAgents.Sensors; -namespace Unity.MLAgents.Extensions.Tests.GridSensors +namespace Unity.MLAgents.Tests { internal class 
TestBoxOverlapChecker : BoxOverlapChecker { @@ -280,7 +281,7 @@ public void TestOnlyOneChecker() var testGo = new GameObject("test"); testGo.transform.position = Vector3.zero; var gridSensorComponent = testGo.AddComponent(); - gridSensorComponent.SetComponentParameters(useGridSensorBase: true, useOneHotTag: true, countColliders: true); + gridSensorComponent.SetComponentParameters(useGridSensorBase: true, useTestingGridSensor: true); var sensors = gridSensorComponent.CreateSensors(); int numChecker = 0; foreach (var sensor in sensors) @@ -295,3 +296,4 @@ public void TestOnlyOneChecker() } } } +#endif diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/BoxOverlapCheckerTests.cs.meta b/com.unity.ml-agents/Tests/Runtime/Sensor/BoxOverlapCheckerTests.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/BoxOverlapCheckerTests.cs.meta rename to com.unity.ml-agents/Tests/Runtime/Sensor/BoxOverlapCheckerTests.cs.meta diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTestUtils.cs b/com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTestUtils.cs similarity index 98% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTestUtils.cs rename to com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTestUtils.cs index 7b8c304daa..548262be96 100644 --- a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTestUtils.cs +++ b/com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTestUtils.cs @@ -2,7 +2,7 @@ using System; using System.Linq; -namespace Unity.MLAgents.Extensions.Tests.GridSensors +namespace Unity.MLAgents.Tests { public static class GridObsTestUtils { diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTestUtils.cs.meta b/com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTestUtils.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTestUtils.cs.meta rename to com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTestUtils.cs.meta diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTests.cs b/com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTests.cs similarity index 81% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTests.cs rename to com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTests.cs index 9decbb825d..1f3c4a5421 100644 --- a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTests.cs +++ b/com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTests.cs @@ -1,11 +1,11 @@ +#if MLA_UNITY_PHYSICS_MODULE using System.Collections; using NUnit.Framework; using UnityEngine; using UnityEngine.TestTools; using Unity.MLAgents.Sensors; -using Unity.MLAgents.Extensions.Sensors; -namespace Unity.MLAgents.Extensions.Tests.GridSensors +namespace Unity.MLAgents.Tests { public class GridSensorTests { @@ -137,36 +137,6 @@ public void TestOneHotSensor() GridObsTestUtils.AssertSubarraysAtIndex(gridSensor.PerceptionBuffer, subarrayIndicies, expectedSubarrays, expectedDefault); } - [Test] - public void TestCountingSensor() - { - testGo.tag = k_Tag2; - string[] tags = { k_Tag1, k_Tag2 }; - gridSensorComponent.SetComponentParameters(tags, countColliders: true); - var gridSensor = (CountingGridSensor)gridSensorComponent.CreateSensors()[0]; - Assert.AreEqual(gridSensor.PerceptionBuffer.Length, 10 * 10 * 2); - - gridSensor.Update(); - - int[] subarrayIndicies = new int[] { 77, 78, 87, 88 }; - float[][] expectedSubarrays = 
GridObsTestUtils.DuplicateArray(new float[] { 1, 0 }, 4); - float[] expectedDefault = new float[] { 0, 0 }; - GridObsTestUtils.AssertSubarraysAtIndex(gridSensor.PerceptionBuffer, subarrayIndicies, expectedSubarrays, expectedDefault); - - var boxGo2 = new GameObject("block"); - boxGo2.tag = k_Tag1; - boxGo2.transform.position = new Vector3(3.1f, 0f, 3f); - boxGo2.AddComponent(); - - gridSensor.Update(); - - subarrayIndicies = new int[] { 77, 78, 87, 88 }; - expectedSubarrays = GridObsTestUtils.DuplicateArray(new float[] { 2, 0 }, 4); - expectedDefault = new float[] { 0, 0 }; - GridObsTestUtils.AssertSubarraysAtIndex(gridSensor.PerceptionBuffer, subarrayIndicies, expectedSubarrays, expectedDefault); - Object.DestroyImmediate(boxGo2); - } - [Test] public void TestCustomSensorInvalidData() { @@ -188,7 +158,7 @@ public void TestMultipleSensors() { testGo.tag = k_Tag2; string[] tags = { k_Tag1, k_Tag2 }; - gridSensorComponent.SetComponentParameters(tags, useOneHotTag: true, countColliders: true, useTestingGridSensor: true); + gridSensorComponent.SetComponentParameters(tags, useOneHotTag: true, useGridSensorBase: true, useTestingGridSensor: true); var gridSensors = gridSensorComponent.CreateSensors(); Assert.IsNotNull(((GridSensorBase)gridSensors[0]).m_BoxOverlapChecker); Assert.IsNull(((GridSensorBase)gridSensors[1]).m_BoxOverlapChecker); @@ -208,3 +178,4 @@ public void TestNoSensors() } } } +#endif diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTests.cs.meta b/com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTests.cs.meta similarity index 100% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/GridSensorTests.cs.meta rename to com.unity.ml-agents/Tests/Runtime/Sensor/GridSensorTests.cs.meta diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/SimpleTestGridSensor.cs b/com.unity.ml-agents/Tests/Runtime/Sensor/SimpleTestGridSensor.cs similarity index 87% rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/SimpleTestGridSensor.cs rename to com.unity.ml-agents/Tests/Runtime/Sensor/SimpleTestGridSensor.cs index 3e7ad18b7b..eb576d8f78 100644 --- a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/SimpleTestGridSensor.cs +++ b/com.unity.ml-agents/Tests/Runtime/Sensor/SimpleTestGridSensor.cs @@ -1,9 +1,8 @@ -using System.Linq; +using System.Collections.Generic; using UnityEngine; using Unity.MLAgents.Sensors; -using Unity.MLAgents.Extensions.Sensors; -namespace Unity.MLAgents.Extensions.Tests.GridSensors +namespace Unity.MLAgents.Tests { public static class TestGridSensorConfig { @@ -58,6 +57,7 @@ protected internal override ProcessCollidersMethod GetProcessCollidersMethod() { return TestGridSensorConfig.ParseAllColliders ? 
ProcessCollidersMethod.ProcessAllColliders : ProcessCollidersMethod.ProcessClosestColliders; } + protected override void GetObjectData(GameObject detectedObject, int typeIndex, float[] dataBuffer) { for (var i = 0; i < DummyData.Length; i++) @@ -69,12 +69,24 @@ protected override void GetObjectData(GameObject detectedObject, int typeIndex, public class SimpleTestGridSensorComponent : GridSensorComponent { + bool m_UseOneHotTag; bool m_UseTestingGridSensor; bool m_UseGridSensorBase; protected override GridSensorBase[] GetGridSensors() { - var sensorList = base.GetGridSensors().ToList(); + List<GridSensorBase> sensorList = new List<GridSensorBase>(); + if (m_UseOneHotTag) + { + var testSensor = new OneHotGridSensor( + SensorName, + CellScale, + GridSize, + DetectableTags, + CompressionType + ); + sensorList.Add(testSensor); + } if (m_UseGridSensorBase) { var testSensor = new GridSensorBase( @@ -111,7 +123,6 @@ public void SetComponentParameters( SensorCompressionType compression = SensorCompressionType.None, bool rotateWithAgent = false, bool useOneHotTag = false, - bool countColliders = false, bool useTestingGridSensor = false, bool useGridSensorBase = false ) @@ -122,8 +133,7 @@ public void SetComponentParameters( ColliderMask = colliderMaskInt < 0 ? LayerMask.GetMask("Default") : colliderMaskInt; RotateWithAgent = rotateWithAgent; CompressionType = compression; - UseOneHotTag = useOneHotTag; - CountColliders = countColliders; + m_UseOneHotTag = useOneHotTag; m_UseGridSensorBase = useGridSensorBase; m_UseTestingGridSensor = useTestingGridSensor; }
diff --git a/com.unity.ml-agents.extensions/Tests/Editor/GridSensors/SimpleTestGridSensor.cs.meta b/com.unity.ml-agents/Tests/Runtime/Sensor/SimpleTestGridSensor.cs.meta
similarity index 100%
rename from com.unity.ml-agents.extensions/Tests/Editor/GridSensors/SimpleTestGridSensor.cs.meta
rename to com.unity.ml-agents/Tests/Runtime/Sensor/SimpleTestGridSensor.cs.meta
diff --git a/docs/Migrating.md b/docs/Migrating.md
index b2124fafa0..eb41c7078b 100644
--- a/docs/Migrating.md
+++ b/docs/Migrating.md
@@ -107,6 +107,34 @@ you wish to use a single behavior to work with multiple board sizes, override `G current `BoardSize`. The values returned by `GetCurrentBoardSize()` must be less than or equal to the corresponding values from `GetMaxBoardSize()`.
+### GridSensor changes
+The sensor configuration has changed:
+* The sensor implementation has been refactored, and existing GridSensors created with the extensions package
+will not work in newer versions. Errors may show up when loading the old sensor in a scene.
+You'll need to remove the old sensor and create a new GridSensor.
+* These parameter names have changed but still refer to the same concepts in the sensor: `GridNumSide` -> `GridSize`,
+`RotateToAgent` -> `RotateWithAgent`, `ObserveMask` -> `ColliderMask`, `DetectableObjects` -> `DetectableTags`.
+* `RootReference` has been removed; the sensor component's GameObject is now always ignored in hit results.
+* The `DepthType` (`ChannelBase`/`ChannelHot`) option and `ChannelDepth` have been removed. The default is now
+a one-hot encoding of the detected tag. If you were using the original GridSensor without overriding any methods,
+switching to the new GridSensor will produce a similar effect for training, although the actual observations
+will be slightly different.
+
+For creating your own GridSensor implementation with custom data:
+* To create a custom GridSensor, derive from `GridSensorBase` instead of `GridSensor`. Besides overriding
+`GetObjectData()`, you will also need to consider overriding `GetCellObservationSize()`, `IsDataNormalized()`,
+and `GetProcessCollidersMethod()`, depending on the data you collect. You'll also need to override
+`GridSensorComponent.GetGridSensors()` and return your custom GridSensor (see the sketch below).
+* The input argument `tagIndex` in `GetObjectData()` has changed from 1-indexed to 0-indexed, and its
+data type has changed from `float` to `int`. The index of the first detectable tag is now 0 instead of 1.
+`normalizedDistance` has been removed from the input arguments.
+* The observation data should be written to the provided `dataBuffer` instead of being returned as a new array.
+* The constraint that all data must be normalized has been removed; specify whether your data is normalized in `IsDataNormalized()`.
+Sensors with non-normalized data cannot use the PNG compression type.
+* The sensor no longer applies any further encoding to the data received from `GetObjectData()`. The values
+received from `GetObjectData()` are sent to the trainer as the observation.
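+
+For illustration only, a custom sensor and component might look like the following minimal sketch. The class
+names, the "object size" observation, and the normalization constant are hypothetical; adapt them to your own
+data and check the exact method signatures against your installed package version.
+
+```csharp
+using UnityEngine;
+using Unity.MLAgents.Sensors;
+
+// Hypothetical example: each cell stores a single value describing how large
+// the closest detected object is, based on its collider bounds.
+public class ObjectSizeGridSensor : GridSensorBase
+{
+    public ObjectSizeGridSensor(
+        string name,
+        Vector3 cellScale,
+        Vector3Int gridSize,
+        string[] detectableTags,
+        SensorCompressionType compression
+    ) : base(name, cellScale, gridSize, detectableTags, compression)
+    {
+    }
+
+    protected override int GetCellObservationSize()
+    {
+        return 1;   // one float per cell
+    }
+
+    protected override bool IsDataNormalized()
+    {
+        return true;   // the value written below is clamped to [0, 1]
+    }
+
+    protected internal override ProcessCollidersMethod GetProcessCollidersMethod()
+    {
+        return ProcessCollidersMethod.ProcessClosestColliders;
+    }
+
+    protected override void GetObjectData(GameObject detectedObject, int tagIndex, float[] dataBuffer)
+    {
+        // tagIndex is 0-based; write the observation into dataBuffer instead of returning an array.
+        var col = detectedObject.GetComponent<Collider>();
+        var size = col != null ? col.bounds.size.magnitude / 10f : 0f;   // assumes objects smaller than ~10 units
+        dataBuffer[0] = Mathf.Clamp01(size);
+    }
+}
+
+// Add this component to the Agent instead of GridSensorComponent so that
+// CreateSensors() picks up the custom sensor.
+public class ObjectSizeGridSensorComponent : GridSensorComponent
+{
+    protected override GridSensorBase[] GetGridSensors()
+    {
+        return new GridSensorBase[]
+        {
+            new ObjectSizeGridSensor(SensorName, CellScale, GridSize, DetectableTags, CompressionType)
+        };
+    }
+}
+```
+
+With this in place, the rest of the configuration (cell scale, grid size, detectable tags, collider mask) is
+still done on the component in the Inspector, as with the built-in `GridSensorComponent`.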
+
+
 ## Migrating to Release 13 ### Implementing IHeuristic in your IActuator implementations - If you have any custom actuators, you can now implement the `IHeuristicProvider` interface to have your actuator