@@ -22,59 +22,27 @@ def resolve_defined_ops(op_names: list[str]) -> list[torch._ops.OpOverload]:
     Skips operators that fail to resolve (e.g., operators not registered or
     model-specific operators not present in the current model).
 
-    If an operator name doesn't specify an overload (e.g., "vllm::unified_attention"),
-    it will automatically use the .default overload.
-
     Note: Users should inspect the operator graph before lowering and ensure
     the specified operators are present in the final graph. Built-in PyTorch
     operators (aten::*, torch::*) may be decomposed, fused, or transformed
     during Inductor's compilation passes, so use them with caution.
 
     Args:
         op_names: List of operator names in PyTorch format
-            (e.g., "vllm::unified_attention" or "vllm::unified_attention.default")
+            (e.g., "vllm::unified_attention.default")
 
     Returns:
         List of successfully resolved operator overloads
     """
-    resolved: list[torch._ops.OpOverload] = []
+    resolved = []
     for op_name in op_names:
-        overload: torch._ops.OpOverload | None = None
-        candidate_names = [op_name]
-
-        # When the caller omits an explicit overload (e.g. "namespace::op"),
-        # also try the conventional ".default" suffix.
-        if "." not in op_name.split("::")[-1]:
-            candidate_names.append(f"{op_name}.default")
-
-        for candidate in candidate_names:
-            try:
-                op = lookup_op(candidate)
-            except Exception:
-                continue
-
-            # lookup_op may return either an OpOverload (desired) or an
-            # OpOverloadPacket (collection of overloads).
-            if hasattr(op, "overloads"):
-                overloads = list(op.overloads())
-                if "default" in overloads:
-                    overload = op.default
-                elif len(overloads) == 1:
-                    overload = getattr(op, overloads[0])
-                else:
-                    logger.warning(
-                        "Operator '%s' has multiple overloads (%s); please "
-                        "specify the desired overload explicitly.",
-                        candidate,
-                        ", ".join(overloads),
-                    )
-            else:
-                overload = op  # Already an OpOverload
-
-            if overload is not None:
-                break
-
-        resolved.append(overload)
+        try:
+            resolved.append(lookup_op(op_name))
+        except Exception:
+            # Skip operators that don't exist (e.g., model-specific ops)
+            logger.warning(
+                "Failed to resolve operator for Inductor partition: %s", op_name
+            )
 
     return resolved
 
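For reference, a minimal usage sketch (not part of the diff) of how the simplified helper behaves after this change. The operator names below are illustrative, and the sketch assumes `resolve_defined_ops` is imported from its defining module and that the named custom ops are registered in the current process:

```python
# Sketch only: op names are illustrative. Each name must now spell out the
# overload (e.g. ".default"), since the implicit-overload fallback was removed.
op_names = [
    "vllm::unified_attention.default",  # resolves if the custom op is registered
    "vllm::nonexistent_op.default",     # hypothetical name; skipped with a warning
]

overloads = resolve_defined_ops(op_names)

# Only successfully resolved overloads are returned, so the result may be
# shorter than the input list; callers should not assume a 1:1 mapping.
for overload in overloads:
    print(overload)
```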