@@ -307,9 +307,11 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool:
     # Partition module into components that can be TRT-accelerated
     fast_partitioner_failed = False

+    logger.info("Beginning TensorRT operator partitioning phase")
     # If specified, try using the fast partitioner and fall back to the global one on failure
     if settings.use_fast_partitioner:
         try:
+            logger.info("Partitioning the graph via the fast partitioner")
             partitioned_module, supported_ops = partitioning.fast_partition(
                 gm,
                 verbose=settings.debug,
@@ -319,14 +321,15 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool:
         except torch.fx.passes.splitter_base.FxNetSplitterInternalError:
             logger.error(
                 "Partitioning failed on the subgraph with fast partition. See trace above. "
-                + "Retrying with global partition.",
+                "Retrying with global partition.",
                 exc_info=True,
             )

             fast_partitioner_failed = True
             settings.use_fast_partitioner = False

     if not settings.use_fast_partitioner:
+        logger.info("Partitioning the graph via the global partitioner")
         partitioned_module, supported_ops = partitioning.global_partition(
             gm,
             verbose=settings.debug,
@@ -340,6 +343,11 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool:
     if not settings.use_fast_partitioner:
         dryrun_tracker.to_run_in_torch.extend(parse_non_trt_nodes(partitioned_module))

+    logger.info(
+        "Successfully completed graph partitioning phase. "
+        "Beginning the conversion phase."
+    )
+
     # Store TRT replicas of Torch subgraphs
     trt_modules = {}
     # Iterate over all components that can be accelerated
@@ -364,14 +372,15 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool:
         # Get the submodule inputs for min, opt, max shapes of the graph inputs
         submodule_inputs = partitioning.construct_submodule_inputs(submodule)

+        assert submodule_inputs is not None
+
         logger.debug(
-            "Submodule name: %s\n Input shapes: %s\n %s",
+            "Converting submodule: %s\n Input shapes: %s\n %s",
             str(name),
             [input.shape for input in submodule_inputs],
             str(submodule.graph),
         )

-        assert submodule_inputs is not None
         # Handle long/double inputs if requested by the user
         if settings.truncate_double:
             submodule_inputs = repair_double_inputs(
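
A note for anyone trying these changes locally: the new partitioning and conversion messages are emitted at INFO level, so they stay hidden under Python's default WARNING threshold. Below is a minimal sketch for surfacing them, assuming the library uses the standard module-based logger naming (e.g. "torch_tensorrt.dynamo"); adjust the logger name if it differs in your build.

import logging

# Route log records to stderr with a simple format.
logging.basicConfig(format="%(name)s %(levelname)s: %(message)s")

# Assumption: torch_tensorrt loggers follow the usual module hierarchy,
# so raising the parent logger's level exposes the new INFO messages.
logging.getLogger("torch_tensorrt").setLevel(logging.INFO)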