property2: $(inputs.value2)
arv:CUDARequirement:
- minCUDADriverVersion: "11.0"
- minCUDAHardwareCapability: "9.0"
- minDeviceCount: 1
- maxDeviceCount: 1
+ cudaVersionMin: "11.0"
+ cudaComputeCapabilityMin: "9.0"
+ deviceCountMin: 1
+ deviceCountMax: 1
{% endcodeblock %}
h2(#RunInSingleContainer). arv:RunInSingleContainer
table(table table-bordered table-condensed).
|_. Field |_. Type |_. Description |
-|minCUDADriverVersion|string|Required. The CUDA SDK version corresponding to the minimum driver version supported by the container (generally, the SDK version 'X.Y' the application was compiled against).|
-|minCUDAHardwareCapability|string|Required. The minimum CUDA hardware capability (in 'X.Y' format) required by the application's PTX or C++ GPU code (will be JIT compiled for the available hardware).|
-|minDeviceCount|integer|Minimum number of GPU devices to allocate on a single node. Required.|
-|maxDeviceCount|integer|Maximum number of GPU devices to allocate on a single node. Optional. If not specified, same as @minDeviceCount@.|
+|cudaVersionMin|string|Required. The CUDA SDK version corresponding to the minimum driver version supported by the container (generally, the SDK version 'X.Y' the application was compiled against).|
+|cudaComputeCapabilityMin|string|Required. The minimum CUDA hardware capability (in 'X.Y' format) required by the application's PTX or C++ GPU code (will be JIT compiled for the available hardware).|
+|deviceCountMin|integer|Minimum number of GPU devices to allocate on a single node. Required.|
+|deviceCountMax|integer|Maximum number of GPU devices to allocate on a single node. Optional. If not specified, same as @deviceCountMin@.|
h2. arv:dockerCollectionPDH
mapPredicate: propertyValue
-- name: CUDARequirement
+- name: cwltool:CUDARequirement
type: record
extends: cwl:ProcessRequirement
inVocab: false
doc: |
- Require support for Nvidia CUDA (GPU hardware acceleration).
+ Require support for NVIDIA CUDA (GPU hardware acceleration).
fields:
class:
type: string
- doc: 'arv:CUDARequirement'
+ doc: 'cwltool:CUDARequirement'
jsonldPredicate:
_id: "@type"
_type: "@vocab"
- minCUDADriverVersion:
+ cudaVersionMin:
type: string
- doc: Minimum CUDA driver version to run the software, in X.Y format of the associated CUDA SDK release.
- minCUDAHardwareCapability:
+ doc: |
+ Minimum CUDA version to run the software, in X.Y format. This
+ corresponds to a CUDA SDK release. When running directly on
+ the host (not in a container) the host must have a compatible
+ CUDA SDK (matching the exact version, or, starting with CUDA
+ 11.3, matching major version). When run in a container, the
+ container image should provide the CUDA runtime, and the host
+ driver is injected into the container. In this case, because
+ CUDA drivers are backwards compatible, it is possible to
+ use an older SDK with a newer driver across major versions.
+
+ See https://docs.nvidia.com/deploy/cuda-compatibility/ for
+ details.
+ cudaComputeCapabilityMin:
type: string
doc: Minimum CUDA hardware capability required to run the software, in X.Y format.
- minDeviceCount:
+ deviceCountMin:
type: int?
default: 1
- doc: Minimum number of GPU devices to request, or 1.
- maxDeviceCount:
+ doc: Minimum number of GPU devices to request, default 1.
+ deviceCountMax:
type: int?
- doc: Maximum number of GPU devices to request. If not specified, same as `minDeviceCount`.
+ doc: Maximum number of GPU devices to request. If not specified, same as `deviceCountMin`.
mapPredicate: propertyValue
-- name: CUDARequirement
+- name: cwltool:CUDARequirement
type: record
extends: cwl:ProcessRequirement
inVocab: false
doc: |
- Require support for Nvidia CUDA (GPU hardware acceleration).
+ Require support for NVIDIA CUDA (GPU hardware acceleration).
fields:
class:
type: string
- doc: 'arv:CUDARequirement'
+ doc: 'cwltool:CUDARequirement'
jsonldPredicate:
_id: "@type"
_type: "@vocab"
- minCUDADriverVersion:
+ cudaVersionMin:
type: string
- doc: Minimum CUDA driver version to run the software, in X.Y format of the associated CUDA SDK release.
- minCUDAHardwareCapability:
+ doc: |
+ Minimum CUDA version to run the software, in X.Y format. This
+ corresponds to a CUDA SDK release. When running directly on
+ the host (not in a container) the host must have a compatible
+ CUDA SDK (matching the exact version, or, starting with CUDA
+ 11.3, matching major version). When run in a container, the
+ container image should provide the CUDA runtime, and the host
+ driver is injected into the container. In this case, because
+ CUDA drivers are backwards compatible, it is possible to
+ use an older SDK with a newer driver across major versions.
+
+ See https://docs.nvidia.com/deploy/cuda-compatibility/ for
+ details.
+ cudaComputeCapabilityMin:
type: string
doc: Minimum CUDA hardware capability required to run the software, in X.Y format.
- minDeviceCount:
+ deviceCountMin:
type: int?
default: 1
- doc: Minimum number of GPU devices to request, or 1.
- maxDeviceCount:
+ doc: Minimum number of GPU devices to request, default 1.
+ deviceCountMax:
type: int?
- doc: Maximum number of GPU devices to request. If not specified, same as `minDeviceCount`.
+ doc: Maximum number of GPU devices to request. If not specified, same as `deviceCountMin`.
mapPredicate: propertyValue
-- name: CUDARequirement
+- name: cwltool:CUDARequirement
type: record
extends: cwl:ProcessRequirement
inVocab: false
doc: |
- Require support for Nvidia CUDA (GPU hardware acceleration).
+ Require support for NVIDIA CUDA (GPU hardware acceleration).
fields:
class:
type: string
- doc: 'arv:CUDARequirement'
+ doc: 'cwltool:CUDARequirement'
jsonldPredicate:
_id: "@type"
_type: "@vocab"
- minCUDADriverVersion:
+ cudaVersionMin:
type: string
- doc: Minimum CUDA driver version to run the software, in X.Y format of the associated CUDA SDK release.
- minCUDAHardwareCapability:
+ doc: |
+ Minimum CUDA version to run the software, in X.Y format. This
+ corresponds to a CUDA SDK release. When running directly on
+ the host (not in a container) the host must have a compatible
+ CUDA SDK (matching the exact version, or, starting with CUDA
+ 11.3, matching major version). When run in a container, the
+ container image should provide the CUDA runtime, and the host
+ driver is injected into the container. In this case, because
+ CUDA drivers are backwards compatible, it is possible to
+ use an older SDK with a newer driver across major versions.
+
+ See https://docs.nvidia.com/deploy/cuda-compatibility/ for
+ details.
+ cudaComputeCapabilityMin:
type: string
doc: Minimum CUDA hardware capability required to run the software, in X.Y format.
- minDeviceCount:
+ deviceCountMin:
type: int?
default: 1
- doc: Minimum number of GPU devices to request, or 1.
- maxDeviceCount:
+ doc: Minimum number of GPU devices to request, default 1.
+ deviceCountMax:
type: int?
- doc: Maximum number of GPU devices to request. If not specified, same as `minDeviceCount`.
+ doc: Maximum number of GPU devices to request. If not specified, same as `deviceCountMin`.
cuda_req, _ = self.get_requirement("http://arvados.org/cwl#CUDARequirement")
if cuda_req:
runtime_constraints["cuda"] = {
- "device_count": cuda_req.get("minDeviceCount", 1),
- "driver_version": cuda_req["minCUDADriverVersion"],
- "hardware_capability": cuda_req["minCUDAHardwareCapability"]
+ "device_count": cuda_req.get("deviceCountMin", 1),
+ "driver_version": cuda_req["cudaVersionMin"],
+ "hardware_capability": cuda_req["cudaComputeCapabilityMin"]
}
if self.timelimit is not None and self.timelimit > 0: