@@ -174,7 +174,7 @@ Full docs can be found at github.com/aws/amazon-` + binName
 	cli.ByteQuantityMinMaxRangeFlags(gpuMemoryTotal, nil, nil, "Number of GPUs' total memory (Example: 4 GiB)")
 	cli.StringFlag(gpuManufacturer, nil, nil, "GPU Manufacturer name (Example: NVIDIA)", nil)
 	cli.StringFlag(gpuModel, nil, nil, "GPU Model name (Example: K520)", nil)
-	cli.Int32MinMaxRangeFlags(inferenceAccelerators, nil, nil, "Total Number of inference accelerators (Example: 4)")
+	cli.IntMinMaxRangeFlags(inferenceAccelerators, nil, nil, "Total Number of inference accelerators (Example: 4)")
 	cli.StringFlag(inferenceAcceleratorManufacturer, nil, nil, "Inference Accelerator Manufacturer name (Example: AWS)", nil)
 	cli.StringFlag(inferenceAcceleratorModel, nil, nil, "Inference Accelerator Model name (Example: Inferentia)", nil)
 	cli.StringOptionsFlag(placementGroupStrategy, nil, nil, "Placement group strategy: [cluster, partition, spread]", []string{"cluster", "partition", "spread"})
@@ -387,7 +387,7 @@ Full docs can be found at github.com/aws/amazon-` + binName
 		GpuMemoryRange:                   cli.ByteQuantityRangeMe(flags[gpuMemoryTotal]),
 		GPUManufacturer:                  cli.StringMe(flags[gpuManufacturer]),
 		GPUModel:                         cli.StringMe(flags[gpuModel]),
-		InferenceAcceleratorsRange:       cli.Int32RangeMe(flags[inferenceAccelerators]),
+		InferenceAcceleratorsRange:       cli.IntRangeMe(flags[inferenceAccelerators]),
 		InferenceAcceleratorManufacturer: cli.StringMe(flags[inferenceAcceleratorManufacturer]),
 		InferenceAcceleratorModel:        cli.StringMe(flags[inferenceAcceleratorModel]),
 		PlacementGroupStrategy:           cli.StringMe(flags[placementGroupStrategy]),
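
For context, both hunks swap the int32-based range helpers for plain-int ones: the flag registration (IntMinMaxRangeFlags) and the filter construction (IntRangeMe) now agree on int. The sketch below is a minimal, hypothetical illustration of that min/max flag-pair pattern using only the standard library; the IntRangeFilter type, the <name>-min / <name>-max flag names, and the helper signatures are assumed from the call sites in this diff, not taken from the repository's cli package.

package main

import (
	"flag"
	"fmt"
)

// IntRangeFilter models an inclusive [LowerBound, UpperBound] range on plain
// ints (the int32 variant is what this change moves away from).
// Hypothetical type, inferred from the diff; not the repository's definition.
type IntRangeFilter struct {
	LowerBound int
	UpperBound int
}

// intMinMaxRangeFlags registers <name>-min and <name>-max integer flags and
// returns pointers to their parsed values.
func intMinMaxRangeFlags(fs *flag.FlagSet, name string, usage string) (min, max *int) {
	min = fs.Int(name+"-min", 0, "Minimum "+usage)
	max = fs.Int(name+"-max", 0, "Maximum "+usage)
	return min, max
}

// intRangeMe turns the two parsed bounds into a filter struct, mirroring the
// "maybe"-style cli.*Me helpers seen in the second hunk above.
func intRangeMe(min, max *int) *IntRangeFilter {
	if min == nil || max == nil {
		return nil
	}
	return &IntRangeFilter{LowerBound: *min, UpperBound: *max}
}

func main() {
	fs := flag.NewFlagSet("example", flag.ExitOnError)
	min, max := intMinMaxRangeFlags(fs, "inference-accelerators", "number of inference accelerators")
	_ = fs.Parse([]string{"--inference-accelerators-min=1", "--inference-accelerators-max=4"})
	fmt.Printf("%+v\n", *intRangeMe(min, max)) // {LowerBound:1 UpperBound:4}
}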