1 change: 0 additions & 1 deletion python/paddle/incubate/cc/ap/facade_op.py
@@ -20,7 +20,6 @@
 
 
 class FacadeOp:
-
     def __init__(self):
        self.custom_op_name_ = self.custom_op_name()
        self.infer_meta_ = self._check_to_str_pair(self.infer_meta())
12 changes: 0 additions & 12 deletions python/paddle/incubate/cc/ap/pir_attrs_serializer.py
@@ -24,7 +24,6 @@
 
 
 class PirAttrsSerializer:
-
     def __init__(self, func):
         self.attributes_schema = self._get_attributes_schema(func)
         self._check_attributes_schema(self.attributes_schema)
@@ -107,7 +106,6 @@ def _get_schema_item_as_key(self, schema_item):
 
 
 class PirAttributeSerializer:
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -117,7 +115,6 @@ def __call__(self, value):
 
 
 class BoolAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -127,7 +124,6 @@ def __call__(self, value):
 
 
 class IntAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -137,7 +133,6 @@ def __call__(self, value):
 
 
 class FloatAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -147,7 +142,6 @@ def __call__(self, value):
 
 
 class StrAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -157,7 +151,6 @@ def __call__(self, value):
 
 
 class DTypeAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -168,7 +161,6 @@ def __call__(self, value):
 
 
 class BoolArrayAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -180,7 +172,6 @@ def __call__(self, value):
 
 
 class IntArrayAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -192,7 +183,6 @@ def __call__(self, value):
 
 
 class FloatArrayAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -204,7 +194,6 @@ def __call__(self, value):
 
 
 class StrArrayAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
@@ -216,7 +205,6 @@ def __call__(self, value):
 
 
 class DTypeArrayAttributeSerializer(PirAttributeSerializer):
-
     def __init__(self, attr_name):
         self.attr_name = attr_name
 
2 changes: 0 additions & 2 deletions python/paddle/incubate/nn/functional/fp8.py
@@ -237,7 +237,6 @@ def fused_transpose_split_quant(
 def fused_transpose_wlch_split_quant(
     x: Tensor, tokens_per_expert: Sequence[int], pow_2_scales: bool = False
 ) -> tuple[list[Tensor], list[Tensor]]:
-
     tokens_per_expert = [int(t) for t in tokens_per_expert]
 
     if in_dynamic_or_pir_mode():
@@ -323,7 +322,6 @@ def fp8_gemm_blockwise(
     is_a_1d_scaled: bool = True,
     is_b_1d_scaled: bool = True,
 ):
-
     assert bias is None, "Bias is not supported"
 
     if bias is None:
1 change: 0 additions & 1 deletion python/paddle/incubate/nn/functional/fused_bias_act.py
@@ -71,7 +71,6 @@ def fused_bias_act(
         [3, 5]
     """
     if in_dynamic_or_pir_mode():
-
         return _C_ops.fused_bias_act(
             x,
             bias,
1 change: 0 additions & 1 deletion python/paddle/jit/dy2static/convert_operators.py
@@ -93,7 +93,6 @@ def convert_load(x):
 
     # get the new output of the var
     if isinstance(x, Value):
-
         from paddle.jit.pir_dy2static.parameter_recorder import (
             _global_inplace_map,
         )
1 change: 0 additions & 1 deletion python/paddle/jit/dy2static/program_translator.py
@@ -1642,7 +1642,6 @@ def __init__(self):
         self._recent_cache_key = None
 
     def _build_once(self, cache_key):
-
         if use_pir_api():
             concrete_program = ConcreteProgram.pir_from_func_spec(
                 func_spec=cache_key.function_spec,
1 change: 0 additions & 1 deletion python/paddle/jit/dy2static/utils.py
@@ -130,7 +130,6 @@ class CUDAGraphState(IntEnum):
 
 
 class TransformOptions:
-
     class ToStaticMode(Flag):
         SOT = auto()
         AST = auto()
@@ -1071,7 +1071,6 @@ def is_not_func(var: VariableBase, other: VariableBase):
 def apply_op_with_zero_division_check(
     op: BinaryOp, lhs: VariableBase, rhs: VariableBase
 ):
-
     graph = lhs.graph
     if op in NEED_GUARD_ZERO_DIVISION_ERROR_OPS:
         call_eq = BuiltinVariable(operator.eq, graph, DanglingTracker())
@@ -343,7 +343,6 @@ class VariableBase:
     mutable_attrs = []
 
     def __init__(self, graph: FunctionGraph, tracker: Tracker):
-
         self.graph = graph
         self.tracker = tracker
         self.id = VariableBase.name_generator.next()
@@ -99,7 +99,6 @@ def convert_instruction(instr: dis.Instruction) -> Instruction:
 
 
 def expand_super_instrs(instructions: list[Instruction]) -> list[Instruction]:
-
     expanded_instrs = []
 
     def replace_jump_target(instrs, old_target, new_target):
1 change: 0 additions & 1 deletion python/paddle/jit/sot/symbolic/statement_ir.py
@@ -106,7 +106,6 @@ class StatementContext: ...
 
 
 class StatementContextRegistry:
-
     _ctx_map: dict[
         type[Any],
         Callable[[Any], AbstractContextManager[None]],
6 changes: 0 additions & 6 deletions python/paddle/jit/sot/utils/info_collector.py
@@ -131,7 +131,6 @@ def summary(cls, history: list[Self]) -> str: ...
 
     @classmethod
     def serialize(cls, obj: dict[str:Any]) -> str:
-
         json_data = json.dumps(obj)
         b64_bytes = base64.b64encode(json_data.encode(ENCODING))
 
@@ -334,7 +333,6 @@ def classify(cls, history: list[Self]) -> str:
 
     @classmethod
     def summary(cls, history: list[Self]) -> str:
-
         reason_dict, reason_list = cls.classify(history)
 
         return "\n".join(
@@ -346,7 +344,6 @@ def summary(cls, history: list[Self]) -> str:
 
     @classmethod
     def json_report(cls, history: list[Self]) -> str:
-
         reason_dict, sorted_reasons = cls.classify(history)
         reason_dict["count"] = {k: len(v) for k, v in sorted_reasons}
         serialized = cls.serialize({cls.SHORT_NAME: reason_dict})
@@ -364,7 +361,6 @@ def restore_from_string(cls, serialized: str) -> list[Self]:
         obj.pop("count")
 
         for classname in obj:
-
             ReasonClass = getattr(exceptions, classname, None)
             for reason in obj[classname]:
                 history.append(cls(ReasonClass(reason_str=reason)))
@@ -446,7 +442,6 @@ def restore_from_string(cls, serialized: str) -> list[Self]:
         obj = cls.deserialize(serialized)[cls.SHORT_NAME]
 
         for entry in obj:
-
             history.append(
                 SubGraphInfo(
                     graph=entry["Graph"],
@@ … @@
         return history
 
     def __eq__(self, other):
-
         need_graph_equal = "details" in ENV_SOT_COLLECT_INFO.get().get(
             self.SHORT_NAME, []
         )
1 change: 0 additions & 1 deletion python/paddle/nn/functional/flash_attention.py
@@ -260,7 +260,6 @@ def _math_attention(
 
 
 def _select_sdp_cuda(head_dim: int) -> str:
-
     if head_dim <= 256:
         return "flash_attn"
     else:
1 change: 0 additions & 1 deletion python/paddle/nn/layer/layers.py
@@ -1516,7 +1516,6 @@ def _build_once(self, *args: Any, **kwargs: Any) -> None:
         pass
 
     def _dygraph_call_func(self, *inputs: Any, **kwargs: Any) -> Any:
-
         for hook_id, forward_pre_hook in self._forward_pre_hooks.items():
             if hook_id in self._forward_pre_hooks_with_kwargs_flag:
                 args_kwargs_result = forward_pre_hook(self, inputs, kwargs)
1 change: 1 addition & 0 deletions python/paddle/nn/quant/format.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Define some layers used to export quantization model with ONNX style."""
+
 from __future__ import annotations
 
 import abc
1 change: 1 addition & 0 deletions python/paddle/nn/quant/qat/conv.py
@@ -14,6 +14,7 @@
 """
 Layers used for QAT.
 """
+
 from paddle.nn import functional as F
 
 from ...layer.layers import Layer
1 change: 0 additions & 1 deletion python/paddle/quantization/quanters/abs_max.py
@@ -217,7 +217,6 @@ def static_forward(self, input):
         return quant_out
 
     def pir_forward(self, input):
-
         state = self._state if self.training else None
         accum = self._accum if self.training else None
 
1 change: 1 addition & 0 deletions python/paddle/static/nn/metric.py
@@ -14,6 +14,7 @@
 """
 All layers just related to metric.
 """
+
 import numpy as np
 
 import paddle
1 change: 0 additions & 1 deletion python/paddle/tensorrt/impls/math.py
@@ -204,7 +204,6 @@ def clip_converter(network, paddle_op, inputs):
 def _get_constant_or_expand_tensor(
     value, constant_inputs, input_shape_tensor, rank, name=None
 ):
-
     if value is not None:
         return fill_constant_layer(
             network,
1 change: 0 additions & 1 deletion test/ap/test_matmul_add_relu.py
@@ -72,7 +72,6 @@ def foo(
     w: pct.Tensor([K, N], DType),
     b: pct.Tensor([B, M, N], DType),
 ):
-
     y = paddle.matmul(x, w)
     tmp = paddle.nn.functional.relu(y)
     tmp2 = tmp + b