from functools import wraps

import paddle


def _patch_grads_for_step(amp_master_grad=False):
    """
Only for auto-parallel align mode: use this decorator to handle None gradients in the optimizer's ``step`` method.

This decorator is applied to optimizer step methods to handle cases where parameters
have None gradients. It creates zero gradients for parameters that need gradients
but currently have None gradients.

Args:
    amp_master_grad (bool, optional): Whether to use master gradient mode.
        If True, gradients will be created as float32 regardless of parameter dtype.
        If False, gradients will be created with the same dtype as the parameter.
        Default is False.

Returns:
    function: Decorated step method that handles None gradients.

Example:
    .. code-block:: python

        >>> from __future__ import annotations
        >>> import types
        >>> import paddle
        >>> import paddle.distributed as dist
        >>> from paddle.distributed.auto_parallel._utils import _patch_grads_for_step

        >>> # ``model`` is any ``paddle.nn.Layer``; a Linear layer stands in here.
        >>> model = paddle.nn.Linear(8, 8)
        >>> opt = paddle.optimizer.AdamW(
        ...     learning_rate=0.001,
        ...     parameters=model.parameters(),
        ...     grad_clip=paddle.nn.ClipGradByGlobalNorm(1.0),
        ... )
        >>> if dist.in_auto_parallel_align_mode():
        ...     orig_step = (
        ...         opt.step.__func__ if hasattr(opt.step, "__func__") else opt.step
        ...     )
        ...     decorator = _patch_grads_for_step(amp_master_grad=True)
        ...     new_step = decorator(orig_step)
        ...     opt.step = types.MethodType(new_step, opt)

    """

    def decorator(step_method):
        @wraps(step_method)
        def wrapper(self, *args, **kwargs):
            def set_param_grad(param):
                # Only fill in missing gradients: parameters that do not need
                # gradients, or that already have one, are left untouched.
                if param.stop_gradient or param.grad is not None:
                    return
                if hasattr(param, "main_grad"):
                    # Master-gradient path: the zero gradient is kept in float32.
                    param.main_grad = paddle.zeros_like(
                        param, dtype=paddle.float32
                    )
                else:
                    dtype = paddle.float32 if amp_master_grad else param.dtype
                    param.grad = paddle.zeros_like(param, dtype=dtype)

            if not isinstance(self._parameter_list[0], dict):
                # Optimizer constructed from a flat parameter list.
                for param in self._parameter_list:
                    set_param_grad(param)
            else:
                # Optimizer constructed from parameter groups.
                for param_group in self._param_groups:
                    for param in param_group["params"]:
                        set_param_grad(param)
            return step_method(self, *args, **kwargs)

        return wrapper

    return decorator