Updates from containers (#711)
nvidia-merlin-bot authored Oct 28, 2022
1 parent 0b3bf01 commit 87c1968
Showing 1 changed file with 114 additions and 0 deletions.
docs/data.json (114 additions, 0 deletions)
@@ -76,6 +76,44 @@
"transformers4rec": "0.1.11+1.g3367d725",
"triton": "2.23.0"
},
"22.09": {
"base_container": "Triton version 22.08",
"compressedSize": "10.83 GB",
"cublas": "11.10.3.66",
"cuda": "11.7.1.017",
"cudf": "22.06.00a+319.g97422602b8",
"cudnn": "8.5.0.96",
"cufft": "10.7.2.91",
"curand": "10.2.10.91",
"cusolver": "11.4.0.1",
"cusparse": "11.7.4.91",
"cutensor": "1.6.0.2",
"dgx_system": "* DGX-1\n* DGX-2\n* DGX A100\n* DGX Station",
"gpu_model": "* `NVIDIA Ampere GPU Architecture <https://www.nvidia.com/en-us/geforce/turing>`_\n* `Turing <https://www.nvidia.com/en-us/geforce/turing/>`_\n* `Volta <https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/>`_\n* `Pascal <https://www.nvidia.com/en-us/data-center/pascal-gpu-architecture/>`_",
"hugectr": "4.0.0",
"hugectr2onnx": "Not applicable",
"merlin.core": "0.7.0",
"merlin.models": "0.8.0",
"merlin.systems": "0.6.0",
"nvidia_driver": "NVIDIA Driver version 465.19.01\nor later is required. However,\nif you're running on Data Center\nGPUs (formerly Tesla) such as T4,\nyou can use any of the following\nNVIDIA Driver versions:\n\n* 418.40 (or later R418)\n* 440.33 (or later R440)\n* 450.51 (or later R450)\n* 460.27 (or later R460)\n\n**Note**: The CUDA Driver\nCompatibility Package does not\nsupport all drivers.",
"nvidia_pytorch": "Not applicable",
"nvidia_tensorflow": "Not applicable",
"nvtabular": "1.5.0",
"openmpi": "4.1.2rc4",
"os": "Ubuntu 20.04.4 LTS",
"python_major": "3",
"pytorch": "Not applicable",
"release": "22.09",
"rmm": "22.06.00a+76.g185c18e6",
"size": "143.4 GB",
"sm": "60, 61, 70, 75, 80",
"sparse_operation_kit": "Not applicable",
"tensorrt": "8.4.2.4+cuda11.6.2.010",
"tf": "Not applicable",
"timestamp_utc": "2022-10-28T17:53:57.770226",
"transformers4rec": "0.1.13",
"triton": "2.25.0"
},
"22.10": {
"base_container": "Triton version 22.08",
"compressedSize": "11.49 GB",
@@ -490,6 +528,44 @@
"transformers4rec": "0.1.11",
"triton": "2.23.0"
},
"22.09": {
"base_container": "Triton version 22.08",
"compressedSize": "10.35 GB",
"cublas": "11.10.3.66",
"cuda": "11.7.1.017",
"cudf": "22.06.00a+319.g97422602b8",
"cudnn": "8.5.0.96",
"cufft": "10.7.2.91",
"curand": "10.2.10.91",
"cusolver": "11.4.0.1",
"cusparse": "11.7.4.91",
"cutensor": "1.6.0.2",
"dgx_system": "* DGX-1\n* DGX-2\n* DGX A100\n* DGX Station",
"gpu_model": "* `NVIDIA Ampere GPU Architecture <https://www.nvidia.com/en-us/geforce/turing>`_\n* `Turing <https://www.nvidia.com/en-us/geforce/turing/>`_\n* `Volta <https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/>`_\n* `Pascal <https://www.nvidia.com/en-us/data-center/pascal-gpu-architecture/>`_",
"hugectr": "Not applicable",
"hugectr2onnx": "Not applicable",
"merlin.core": "0.7.0",
"merlin.models": "0.8.0",
"merlin.systems": "0.6.0",
"nvidia_driver": "NVIDIA Driver version 465.19.01\nor later is required. However,\nif you're running on Data Center\nGPUs (formerly Tesla) such as T4,\nyou can use any of the following\nNVIDIA Driver versions:\n\n* 418.40 (or later R418)\n* 440.33 (or later R440)\n* 450.51 (or later R450)\n* 460.27 (or later R460)\n\n**Note**: The CUDA Driver\nCompatibility Package does not\nsupport all drivers.",
"nvidia_pytorch": "Not applicable",
"nvidia_tensorflow": "Not applicable",
"nvtabular": "1.5.0",
"openmpi": "4.1.2rc4",
"os": "Ubuntu 20.04.4 LTS",
"python_major": "3",
"pytorch": "1.12.1",
"release": "22.09",
"rmm": "22.06.00a+76.g185c18e6",
"size": "144.29 GB",
"sm": "Not applicable",
"sparse_operation_kit": "Not applicable",
"tensorrt": "8.4.2.4+cuda11.6.2.010",
"tf": "Not applicable",
"timestamp_utc": "2022-10-28T17:54:55.225705",
"transformers4rec": "0.1.13",
"triton": "2.25.0"
},
"22.10": {
"base_container": "Triton version 22.08",
"compressedSize": "10.96 GB",
@@ -1017,6 +1093,44 @@
"transformers4rec": "0.1.11+1.g3367d725",
"triton": "2.23.0"
},
"22.09": {
"base_container": "Triton version 22.08",
"compressedSize": "11.5 GB",
"cublas": "11.10.3.66",
"cuda": "11.7.1.017",
"cudf": "22.06.00a+319.g97422602b8",
"cudnn": "8.5.0.96",
"cufft": "10.7.2.91",
"curand": "10.2.10.91",
"cusolver": "11.4.0.1",
"cusparse": "11.7.4.91",
"cutensor": "1.6.0.2",
"dgx_system": "* DGX-1\n* DGX-2\n* DGX A100\n* DGX Station",
"gpu_model": "* `NVIDIA Ampere GPU Architecture <https://www.nvidia.com/en-us/geforce/turing>`_\n* `Turing <https://www.nvidia.com/en-us/geforce/turing/>`_\n* `Volta <https://www.nvidia.com/en-us/data-center/volta-gpu-architecture/>`_\n* `Pascal <https://www.nvidia.com/en-us/data-center/pascal-gpu-architecture/>`_",
"hugectr": "Not applicable",
"hugectr2onnx": "Not applicable",
"merlin.core": "0.7.0",
"merlin.models": "0.8.0",
"merlin.systems": "0.6.0",
"nvidia_driver": "NVIDIA Driver version 465.19.01\nor later is required. However,\nif you're running on Data Center\nGPUs (formerly Tesla) such as T4,\nyou can use any of the following\nNVIDIA Driver versions:\n\n* 418.40 (or later R418)\n* 440.33 (or later R440)\n* 450.51 (or later R450)\n* 460.27 (or later R460)\n\n**Note**: The CUDA Driver\nCompatibility Package does not\nsupport all drivers.",
"nvidia_pytorch": "Not applicable",
"nvidia_tensorflow": "Not applicable",
"nvtabular": "1.5.0",
"openmpi": "4.1.2rc4",
"os": "Ubuntu 20.04.4 LTS",
"python_major": "3",
"pytorch": "Not applicable",
"release": "22.09",
"rmm": "22.06.00a+76.g185c18e6",
"size": "144.98 GB",
"sm": "Not applicable",
"sparse_operation_kit": "1.1.4",
"tensorrt": "8.4.2.4+cuda11.6.2.010",
"tf": "2.9.1",
"timestamp_utc": "2022-10-28T17:55:56.450143",
"transformers4rec": "0.1.13",
"triton": "2.25.0"
},
"22.10": {
"base_container": "Triton version 22.08",
"compressedSize": "12.18 GB",
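
For reference, a minimal sketch of how the support-matrix data extended by this commit could be read. It assumes (the parent keys sit outside the visible hunks) that the top level of docs/data.json maps Merlin container names to objects keyed by release tag, such as the "22.09" entries added above; the field names used below all appear in those entries.

import json

# Minimal sketch: load the support-matrix data that this commit extends.
# Assumption (not shown in the visible hunks): the top level of
# docs/data.json maps container names to release objects keyed by tag,
# e.g. "22.09".
with open("docs/data.json") as f:
    data = json.load(f)

for container, releases in data.items():
    if not isinstance(releases, dict):
        continue
    entry = releases.get("22.09")
    if not isinstance(entry, dict):
        continue
    print(container)
    # These fields appear in each 22.09 entry in the diff above.
    for field in ("triton", "cuda", "nvtabular", "merlin.models", "transformers4rec"):
        print(f"  {field}: {entry.get(field, 'n/a')}")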