Updates from containers
nvidia-merlin-bot committed Mar 14, 2023
1 parent ac346bc commit 6d43982
Showing 1 changed file with 120 additions and 0 deletions.
docs/data.json (120 additions, 0 deletions)
@@ -239,6 +239,46 @@
"timestamp_utc": "2023-01-04T15:17:29.961748",
"transformers4rec": "0.1.16",
"triton": "2.28.0"
},
"23.02": {
"base_container": "Triton version 22.12",
"compressedSize": "5.42 GB",
"cublas": "11.11.3.6",
"cuda": "11.8.0.065",
"cudf": "22.08.00a+304.g6ca81bbc78.dirty",
"cudnn": "8.7.0.84",
"cufft": "10.9.0.58",
"curand": "10.3.0.86",
"cusolver": "11.4.1.48",
"cusparse": "11.7.5.86",
"cutensor": "1.6.1.5",
"dgx_system": "* DGX-1\n* DGX-2\n* DGX A100\n* DGX Station",
"distributed_embeddings": "Not applicable",
"gpu_model": "* `NVIDIA Ampere GPU Architecture <https://www.nvidia.com/en-us/geforce/turing>`_\n* `Turing <https://www.nvidia.com/en-us/geforce/turing/>`_\n* `Volta <https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/>`_\n* `Pascal <https://www.nvidia.com/en-us/data-center/pascal-gpu-architecture/>`_",
"hugectr": "23.2.0",
"hugectr2onnx": "Not applicable",
"merlin.core": "23.2.0",
"merlin.dataloader": "23.2.0",
"merlin.models": "23.2.0",
"merlin.systems": "23.2.0",
"nvidia_driver": "NVIDIA Driver version 465.19.01\nor later is required. However,\nif you're running on Data Center\nGPUs (formerly Tesla) such as T4,\nyou can use any of the following\nNVIDIA Driver versions:\n\n* 418.40 (or later R418)\n* 440.33 (or later R440)\n* 450.51 (or later R450)\n* 460.27 (or later R460)\n\n**Note**: The CUDA Driver\nCompatibility Package does not\nsupport all drivers.",
"nvidia_pytorch": "Not applicable",
"nvidia_tensorflow": "Not applicable",
"nvtabular": "23.2.0",
"openmpi": "4.1.4",
"os": "Ubuntu 20.04.5 LTS",
"python_major": "3",
"pytorch": "Not applicable",
"release": "23.02",
"rmm": "22.08.00a+62.gf6bf047.dirty",
"size": "780.56 GB",
"sm": "60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80, 60, 61, 70, 75, 80",
"sparse_operation_kit": "Not applicable",
"tensorrt": "8.5.1.7",
"tf": "Not applicable",
"timestamp_utc": "2023-03-14T14:56:31.344931",
"transformers4rec": "23.2.0",
"triton": "2.29.0"
}
},
"nvcr.io/nvidia/merlin/merlin-inference": {
@@ -795,6 +835,46 @@
"timestamp_utc": "2023-01-04T15:16:54.396183",
"transformers4rec": "0.1.16",
"triton": "2.28.0"
},
"23.02": {
"base_container": "Triton version 22.12",
"compressedSize": "6.7 GB",
"cublas": "11.11.3.6",
"cuda": "11.8.0.065",
"cudf": "22.08.00a+304.g6ca81bbc78.dirty",
"cudnn": "8.7.0.84",
"cufft": "10.9.0.58",
"curand": "10.3.0.86",
"cusolver": "11.4.1.48",
"cusparse": "11.7.5.86",
"cutensor": "1.6.1.5",
"dgx_system": "* DGX-1\n* DGX-2\n* DGX A100\n* DGX Station",
"distributed_embeddings": "Not applicable",
"gpu_model": "* `NVIDIA Ampere GPU Architecture <https://www.nvidia.com/en-us/geforce/turing>`_\n* `Turing <https://www.nvidia.com/en-us/geforce/turing/>`_\n* `Volta <https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/>`_\n* `Pascal <https://www.nvidia.com/en-us/data-center/pascal-gpu-architecture/>`_",
"hugectr": "Not applicable",
"hugectr2onnx": "Not applicable",
"merlin.core": "23.2.0",
"merlin.dataloader": "23.2.0",
"merlin.models": "23.2.0",
"merlin.systems": "23.2.0",
"nvidia_driver": "NVIDIA Driver version 465.19.01\nor later is required. However,\nif you're running on Data Center\nGPUs (formerly Tesla) such as T4,\nyou can use any of the following\nNVIDIA Driver versions:\n\n* 418.40 (or later R418)\n* 440.33 (or later R440)\n* 450.51 (or later R450)\n* 460.27 (or later R460)\n\n**Note**: The CUDA Driver\nCompatibility Package does not\nsupport all drivers.",
"nvidia_pytorch": "Not applicable",
"nvidia_tensorflow": "Not applicable",
"nvtabular": "23.2.0",
"openmpi": "4.1.4",
"os": "Ubuntu 20.04.5 LTS",
"python_major": "3",
"pytorch": "1.13.1",
"release": "23.02",
"rmm": "22.08.00a+62.gf6bf047.dirty",
"size": "783.02 GB",
"sm": "Not applicable",
"sparse_operation_kit": "Not applicable",
"tensorrt": "8.5.1.7",
"tf": "Not applicable",
"timestamp_utc": "2023-03-14T14:55:59.518367",
"transformers4rec": "23.2.0",
"triton": "2.29.0"
}
},
"nvcr.io/nvidia/merlin/merlin-pytorch-inference": {
@@ -1470,6 +1550,46 @@
"timestamp_utc": "2023-01-04T15:16:17.274118",
"transformers4rec": "0.1.16",
"triton": "2.28.0"
},
"23.02": {
"base_container": "Triton version 22.12",
"compressedSize": "6.49 GB",
"cublas": "11.11.3.6",
"cuda": "11.8.0.065",
"cudf": "22.08.00a+304.g6ca81bbc78.dirty",
"cudnn": "8.7.0.84",
"cufft": "10.9.0.58",
"curand": "10.3.0.86",
"cusolver": "11.4.1.48",
"cusparse": "11.7.5.86",
"cutensor": "1.6.1.5",
"dgx_system": "* DGX-1\n* DGX-2\n* DGX A100\n* DGX Station",
"distributed_embeddings": "0.2.0",
"gpu_model": "* `NVIDIA Ampere GPU Architecture <https://www.nvidia.com/en-us/geforce/turing>`_\n* `Turing <https://www.nvidia.com/en-us/geforce/turing/>`_\n* `Volta <https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/>`_\n* `Pascal <https://www.nvidia.com/en-us/data-center/pascal-gpu-architecture/>`_",
"hugectr": "Not applicable",
"hugectr2onnx": "Not applicable",
"merlin.core": "23.2.0",
"merlin.dataloader": "23.2.0",
"merlin.models": "23.2.0",
"merlin.systems": "23.2.0",
"nvidia_driver": "NVIDIA Driver version 465.19.01\nor later is required. However,\nif you're running on Data Center\nGPUs (formerly Tesla) such as T4,\nyou can use any of the following\nNVIDIA Driver versions:\n\n* 418.40 (or later R418)\n* 440.33 (or later R440)\n* 450.51 (or later R450)\n* 460.27 (or later R460)\n\n**Note**: The CUDA Driver\nCompatibility Package does not\nsupport all drivers.",
"nvidia_pytorch": "Not applicable",
"nvidia_tensorflow": "Not applicable",
"nvtabular": "23.2.0",
"openmpi": "4.1.4",
"os": "Ubuntu 20.04.5 LTS",
"python_major": "3",
"pytorch": "Not applicable",
"release": "23.02",
"rmm": "22.08.00a+62.gf6bf047.dirty",
"size": "783.12 GB",
"sm": "Not applicable",
"sparse_operation_kit": "1.1.4",
"tensorrt": "8.5.1.7",
"tf": "2.10.1",
"timestamp_utc": "2023-03-14T14:55:26.656343",
"transformers4rec": "23.2.0",
"triton": "2.29.0"
}
},
"nvcr.io/nvidia/merlin/merlin-tensorflow-inference": {
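The three hunks above all add the same shape of entry, so a short note on how the file is organized may help readers of this support matrix: each top-level key in `docs/data.json` is a container image name, and each image maps a release tag such as `23.02` to a flat dictionary of component versions (`cuda`, `triton`, `nvtabular`, and so on). The snippet below is a minimal sketch of reading that structure with Python; it assumes only the layout visible in this diff and simply prints whichever 23.02 entries the full file contains.

```python
import json

# Load the support-matrix data that this commit extends
# (path taken from the diff header above).
with open("docs/data.json") as f:
    data = json.load(f)

# Each top-level key is a container image; each image maps release
# tags such as "23.02" to a flat dict of component versions.
for image, releases in data.items():
    entry = releases.get("23.02")
    if entry is None:
        continue
    # These fields appear in every 23.02 entry added by this commit.
    print(f"{image}: Triton {entry['triton']}, CUDA {entry['cuda']}, "
          f"NVTabular {entry['nvtabular']}")
```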
