apache/tvm #16690 (Closed)

Description
Hi, I am trying to run the build example build.py, but I am getting the following error:
(webgpu-env) user@le-big-mac repos/web-stable-diffusion (main *) » python3 build.py
Automatically configuring target: metal -keys=metal,gpu -max_function_args=31 -max_num_threads=256 -max_shared_memory_per_block=32768 -max_threads_per_block=1024 -thread_warp_size=32
`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The value `text_config["id2label"]` will be overriden.
`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The value `text_config["bos_token_id"]` will be overriden.
`text_config_dict` is provided which will be used to initialize `CLIPTextConfig`. The value `text_config["eos_token_id"]` will be overriden.
Traceback (most recent call last):
File "~Dropbox/repos/web-stable-diffusion/build.py", line 157, in <module>
mod, params = trace_models(torch_dev_key)
File "~Dropbox/repos/web-stable-diffusion/build.py", line 80, in trace_models
clip = trace.clip_to_text_embeddings(pipe)
File "~Dropbox/repos/web-stable-diffusion/web_stable_diffusion/trace/model_trace.py", line 27, in clip_to_text_embeddings
mod = dynamo_capture_subgraphs(
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/tvm/relax/frontend/torch/dynamo.py", line 198, in dynamo_capture_subgraphs
compiled_model(*params, **kwargs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py", line 408, in _fn
return fn(*args, **kwargs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py", line 569, in catch_errors
return callback(frame, cache_entry, hooks, frame_state)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 671, in _convert_frame
result = inner_convert(frame, cache_entry, hooks, frame_state)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 377, in _convert_frame_assert
return _compile(
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 595, in _compile
guarded_code = compile_inner(code, one_graph, hooks, transform)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/utils.py", line 243, in time_wrapper
r = func(*args, **kwargs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 512, in compile_inner
out_code = transform_code_object(code, transform)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/bytecode_transformation.py", line 1033, in transform_code_object
transformations(instructions, code_options)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 150, in _fn
return fn(*args, **kwargs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 477, in transform
tracer.run()
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 2120, in run
super().run()
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 815, in run
and self.step()
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 778, in step
getattr(self, inst.opname)(inst)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 2235, in RETURN_VALUE
self.output.compile_subgraph(
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 880, in compile_subgraph
self.compile_and_call_fx_graph(tx, list(reversed(stack_values)), root)
File "~miniconda3/envs/webgpu-env/lib/python3.10/contextlib.py", line 79, in inner
return func(*args, **kwds)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 1025, in compile_and_call_fx_graph
compiled_fn = self.call_user_compiler(gm)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/utils.py", line 243, in time_wrapper
r = func(*args, **kwargs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 1096, in call_user_compiler
raise BackendCompilerFailed(self.compiler_fn, e).with_traceback(
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 1077, in call_user_compiler
compiled_fn = compiler_fn(gm, self.example_inputs())
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/_dynamo/repro/after_dynamo.py", line 117, in debug_wrapper
compiled_gm = compiler_fn(gm, example_inputs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/torch/__init__.py", line 1655, in __call__
return self.compiler_fn(model_, inputs_, **self.kwargs)
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/tvm/relax/frontend/torch/dynamo.py", line 184, in _capture
mod_ = from_fx(
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/tvm/relax/frontend/torch/fx_translator.py", line 1635, in from_fx
return TorchFXImporter().from_fx(
File "~miniconda3/envs/webgpu-env/lib/python3.10/site-packages/tvm/relax/frontend/torch/fx_translator.py", line 1520, in from_fx
func_name in self.convert_map
torch._dynamo.exc.BackendCompilerFailed: backend='_capture' raised:
AssertionError: Unsupported function type position_ids
Set TORCH_LOGS="+dynamo" and TORCHDYNAMO_VERBOSE=1 for more information
You can suppress this exception and fall back to eager by setting:
import torch._dynamo
torch._dynamo.config.suppress_errors = True

OS: macOS Sonoma 14.0
Device: M1 Mac
Python: 3.10
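In case it helps with triage, here is the diagnostic sketch I would run next, based only on the two suggestions printed at the end of the traceback plus the names that appear in it (TorchFXImporter and its convert_map). This is not a fix: suppress_errors only makes Dynamo fall back to eager execution, so the CLIP graph is not actually handed to TVM, and whether convert_map is a plain attribute with these key types may vary by TVM version.

# Verbose Dynamo logging, as suggested by the error message:
#   TORCH_LOGS="+dynamo" TORCHDYNAMO_VERBOSE=1 python3 build.py

# Eager fallback, also suggested by the error message. This only suppresses
# the backend error (the model runs eagerly instead of being captured), so
# it confirms the pipeline itself works but does not produce a TVM module.
import torch._dynamo
torch._dynamo.config.suppress_errors = True

# Inspect which call targets the TVM FX importer knows about. convert_map is
# the attribute checked at fx_translator.py line 1520 in the traceback; its
# keys are assumed here to be a mix of callables and strings, which may
# differ across TVM versions.
from tvm.relax.frontend.torch.fx_translator import TorchFXImporter

importer = TorchFXImporter()
names = {getattr(k, "__name__", str(k)) for k in importer.convert_map}
print(len(names), "supported call targets")
print("position_ids supported:", any("position_ids" in n for n in names))

Running the last part prints False for position_ids on my setup, which matches the AssertionError above.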