Skip to content

Commit

Permalink
Reapply with fix "Add comments to _is_available and raise exceptions in unimplemented code paths" (#6141)
Browse files Browse the repository at this point in the history

This reverts commit ddc2e34.

Use `is_xla_device_available` to check CUDA device.
  • Loading branch information
frgossen committed Dec 14, 2023
1 parent bf5edb3 commit b324d4e
Showing 1 changed file with 40 additions and 21 deletions.
61 changes: 40 additions & 21 deletions benchmarks/benchmark_experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,35 +56,54 @@ def list_experiment_configs(self):

experiment_configs = []
for experiment_config in self.expand_config_choices(config_choices):
if not self.is_available(experiment_config):
if not self._is_available(experiment_config):
continue

self._add_experiment_env(experiment_config)
experiment_configs.append(experiment_config)
return experiment_configs

def is_available(self, experiment_config):
if experiment_config["dynamo"] and experiment_config[
"dynamo"] not in dynamo.list_backends(exclude_tags=()):
return False
if experiment_config["dynamo"] == "inductor" and not (
experiment_config["accelerator"] == "cuda" and
not experiment_config["xla"]):
return False
if experiment_config["dynamo"] == "openxla_eval" and not (
experiment_config["xla"] and experiment_config["test"] == "eval"):
return False
if experiment_config["dynamo"] == "openxla" and not experiment_config["xla"]:
return False
if (experiment_config["xla"] and
not is_xla_device_available(experiment_config["accelerator"].upper())):
return False
if (experiment_config["accelerator"] == "tpu" and
not experiment_config["xla"]):
def _is_available(self, experiment_config):
cfg_dynamo = experiment_config["dynamo"]
cfg_accelerator = experiment_config["accelerator"]
cfg_xla = experiment_config["xla"]
cfg_test = experiment_config["test"]

# Check that dynamo refers to an existing backend.
if cfg_dynamo is not None and cfg_dynamo not in dynamo.list_backends(
exclude_tags=()):
return False
if (experiment_config["accelerator"] == "cuda" and
not experiment_config["xla"] and not is_xla_device_available("CUDA")):

# Check dynamo backend-specifics constraints.
if cfg_dynamo == "inductor":
if cfg_accelerator != "cuda" or cfg_xla is not None:
return False
elif cfg_dynamo == "openxla_eval":
if cfg_xla is None or cfg_test != "eval":
return False
elif cfg_dynamo == "openxla":
if cfg_xla is None:
return False
else:
raise NotImplementedError

# Check XLA device available if requested.
if cfg_xla is not None and not is_xla_device_available(
cfg_accelerator.upper()):
return False

# Check accelerator contraints.
if cfg_accelerator == "tpu":
if cfg_xla is None:
return False
elif cfg_accelerator == "cuda":
if cfg_xla is None and not is_xla_device_available("CUDA"):
return False
elif cfg_accelerator == "cpu":
pass
else:
raise NotImplementedError

return True

def _add_experiment_env(self, experiment_config):
Expand Down

0 comments on commit b324d4e

Please sign in to comment.