from typing import Callable, Optional, Tuple, Union

import torch
import torch.nn as nn
import torch.nn.functional as F

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...modeling_attn_mask_utils import AttentionMaskConverter
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import (
    LossKwargs,
    auto_docstring,
    can_return_tuple,
    is_torch_flex_attn_available,
    logging,
)
from .configuration_olmo import OlmoConfig


if is_torch_flex_attn_available():
    from torch.nn.attention.flex_attention import BlockMask

    from ...integrations.flex_attention import make_flex_block_causal_mask


logger = logging.get_logger(__name__)


class OlmoLayerNorm(nn.Module):
    """LayerNorm but with no learnable weight or bias."""

    def __init__(self, hidden_size: int) -> None:
        super().__init__()
        self.normalized_shape = (hidden_size,)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        orig_dtype = hidden_states.dtype
        return F.layer_norm(
            hidden_states.to(dtype=torch.float32), self.normalized_shape, None, None, eps=1e-5
        ).to(orig_dtype)
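
# The layer norm above is deliberately non-parametric: `weight` and `bias` are passed to
# F.layer_norm as None, so the output is purely standardized over the last dimension,
# computed in float32 and cast back to the input dtype. A minimal sketch of the
# observable behavior (illustrative only, not part of the library API):
#
#     >>> ln = OlmoLayerNorm(4)
#     >>> x = torch.randn(2, 3, 4, dtype=torch.float16)
#     >>> y = ln(x)
#     >>> y.dtype            # input dtype is preserved on the way out
#     torch.float16
#     >>> list(ln.parameters())  # no learnable parameters are registered
#     []
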
class OlmoMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)
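
# Illustration (a sketch, not part of the modeling code): `rotate_half` maps the two
# halves of the last dimension (a, b) -> (-b, a). Together with the elementwise
# cos/sin products in `apply_rotary_pos_emb` below, this expresses the 2D rotation
# used by rotary position embeddings with real-valued tensors only:
#
#     >>> t = torch.tensor([1.0, 2.0, 3.0, 4.0])
#     >>> rotate_half(t)
#     tensor([-3., -4.,  1.,  2.])
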
def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    q_type, k_type = q.dtype, k.dtype
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(q_type), k_embed.to(k_type)


class OlmoAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: OlmoConfig, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: Tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_value: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        # OLMo clamps the QKV activations when `clip_qkv` is configured
        if self.config.clip_qkv is not None:
            query_states.clamp_(min=-self.config.clip_qkv, max=self.config.clip_qkv)
            key_states.clamp_(min=-self.config.clip_qkv, max=self.config.clip_qkv)
            value_states.clamp_(min=-self.config.clip_qkv, max=self.config.clip_qkv)

        query_states = query_states.view(hidden_shape).transpose(1, 2)
        key_states = key_states.view(hidden_shape).transpose(1, 2)
        value_states = value_states.view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and kwargs.get("output_attentions", False):
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class OlmoDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: OlmoConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = OlmoAttention(config=config, layer_idx=layer_idx)
        self.mlp = OlmoMLP(config)
        self.input_layernorm = OlmoLayerNorm(config.hidden_size)
        self.post_attention_layernorm = OlmoLayerNorm(config.hidden_size)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[Tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, self_attn_weights = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            output_attentions=output_attentions,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states

        outputs = (hidden_states,)
        if output_attentions:
            outputs += (self_attn_weights,)

        return outputs


class OlmoRotaryEmbedding(nn.Module):
    def __init__(self, config: OlmoConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and config.rope_scaling is not None:
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class OlmoPreTrainedModel(PreTrainedModel):
    config_class = OlmoConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["OlmoDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn_2 = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _supports_cache_class = True
    _supports_quantized_cache = True
    _supports_static_cache = True
    _supports_attention_backend = True

    def _init_weights(self, module):
        std = self.config.initializer_range
        if isinstance(module, nn.Linear):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=std)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()


@auto_docstring
class OlmoModel(OlmoPreTrainedModel):
    def __init__(self, config: OlmoConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [OlmoDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = OlmoLayerNorm(config.hidden_size)
        self.rotary_emb = OlmoRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embed_tokens

    def set_input_embeddings(self, value):
        self.embed_tokens = value

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **flash_attn_kwargs: Unpack[FlashAttentionKwargs],
    ) -> BaseModelOutputWithPast:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        use_cache = use_cache if use_cache is not None else self.config.use_cache

        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if self.gradient_checkpointing and self.training and use_cache:
            logger.warning_once(
                "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`."
            )
            use_cache = False

        if not isinstance(past_key_values, (type(None), Cache)):
            raise ValueError("The `past_key_values` should be either a `Cache` object or `None`.")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache()

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = self._update_causal_mask(
            attention_mask, inputs_embeds, cache_position, past_key_values, output_attentions
        )

        hidden_states = inputs_embeds

        # create position embeddings to be shared across the decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        # decoder layers
        all_hidden_states = () if output_hidden_states else None
        all_self_attns = () if output_attentions else None

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            if output_hidden_states:
                all_hidden_states += (hidden_states,)

            layer_outputs = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                output_attentions=output_attentions,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **flash_attn_kwargs,
            )

            hidden_states = layer_outputs[0]

            if output_attentions:
                all_self_attns += (layer_outputs[1],)

        hidden_states = self.norm(hidden_states)

        # add hidden states from the last decoder layer
        if output_hidden_states:
            all_hidden_states += (hidden_states,)

        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
            hidden_states=all_hidden_states,
            attentions=all_self_attns,
        )

    def _update_causal_mask(
        self,
        attention_mask: Union[torch.Tensor, "BlockMask"],
        input_tensor: torch.Tensor,
        cache_position: torch.Tensor,
        past_key_values: Cache,
        output_attentions: bool = False,
    ):
        if self.config._attn_implementation == "flash_attention_2":
            if attention_mask is not None and (attention_mask == 0.0).any():
                return attention_mask
            return None
        if self.config._attn_implementation == "flex_attention":
            if isinstance(attention_mask, torch.Tensor):
                attention_mask = make_flex_block_causal_mask(attention_mask)
            return attention_mask

        past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
        using_compilable_cache = past_key_values.is_compileable if past_key_values is not None else False

        # When output attentions is True, sdpa implementation's forward falls back to the eager implementation
        if self.config._attn_implementation == "sdpa" and not using_compilable_cache and not output_attentions:
            if AttentionMaskConverter._ignore_causal_mask_sdpa(
                attention_mask,
                inputs_embeds=input_tensor,
                past_key_values_length=past_seen_tokens,
                is_training=self.training,
            ):
                return None

        dtype = input_tensor.dtype
        sequence_length = input_tensor.shape[1]
        if using_compilable_cache:
            target_length = past_key_values.get_max_cache_shape()
        else:
            target_length = (
                attention_mask.shape[-1]
                if isinstance(attention_mask, torch.Tensor)
                else past_seen_tokens + sequence_length + 1
            )

        # In case the provided `attention` mask is 2D, we generate a causal mask here (4D).
        causal_mask = self._prepare_4d_causal_attention_mask_with_cache_position(
            attention_mask,
            sequence_length=sequence_length,
            target_length=target_length,
            dtype=dtype,
            cache_position=cache_position,
            batch_size=input_tensor.shape[0],
        )

        if (
            self.config._attn_implementation == "sdpa"
            and attention_mask is not None
            and attention_mask.device.type in ["cuda", "xpu", "npu"]
            and not output_attentions
        ):
            # Attend to all tokens in fully masked rows in the causal_mask, for example the relevant first rows
            # when using left padding. This is required by F.scaled_dot_product_attention memory-efficient attention.
            min_dtype = torch.finfo(dtype).min
            causal_mask = AttentionMaskConverter._unmask_unattended(causal_mask, min_dtype)

        return causal_mask

    @staticmethod
    def _prepare_4d_causal_attention_mask_with_cache_position(
        attention_mask: torch.Tensor,
        sequence_length: int,
        target_length: int,
        dtype: torch.dtype,
        cache_position: torch.Tensor,
        batch_size: int,
        **kwargs,
    ):
        """
        Creates a causal 4D mask of shape `(batch_size, 1, query_length, key_value_length)` from a 2D mask of shape
        `(batch_size, key_value_length)`, or if the input `attention_mask` is already 4D, do nothing.

        Args:
            attention_mask (`torch.Tensor`):
                A 2D attention mask of shape `(batch_size, key_value_length)` or a 4D attention mask of shape
                `(batch_size, 1, query_length, key_value_length)`.
            sequence_length (`int`):
                The sequence length being processed.
            target_length (`int`):
                The target length: when generating with static cache, the mask should be as long as the static cache,
                to account for the 0 padding, the part of the cache that is not filled yet.
            dtype (`torch.dtype`):
                The dtype to use for the 4D attention mask.
            cache_position (`torch.Tensor`):
                Indices depicting the position of the input sequence tokens in the sequence.
            batch_size (`torch.Tensor`):
                Batch size.
        """
        if attention_mask is not None and attention_mask.dim() == 4:
            # In this case we assume that the mask comes already in inverted form and requires no inversion or slicing.
            causal_mask = attention_mask
        else:
            min_dtype = torch.finfo(dtype).min
            causal_mask = torch.full(
                (sequence_length, target_length), fill_value=min_dtype, dtype=dtype, device=cache_position.device
            )
            if sequence_length != 1:
                causal_mask = torch.triu(causal_mask, diagonal=1)
            causal_mask *= torch.arange(target_length, device=cache_position.device) > cache_position.reshape(-1, 1)
            causal_mask = causal_mask[None, None, :, :].expand(batch_size, 1, -1, -1)
            if attention_mask is not None:
                causal_mask = causal_mask.clone()  # copy to contiguous memory for in-place edit
                mask_length = attention_mask.shape[-1]
                padding_mask = causal_mask[:, :, :, :mask_length] + attention_mask[:, None, None, :].to(
                    causal_mask.device
                )
                padding_mask = padding_mask == 0
                causal_mask[:, :, :, :mask_length] = causal_mask[:, :, :, :mask_length].masked_fill(
                    padding_mask, min_dtype
                )

        return causal_mask


class KwargsForCausalLM(FlashAttentionKwargs, LossKwargs): ...


@auto_docstring
class OlmoForCausalLM(OlmoPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = OlmoModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.model.embed_tokens

    def set_input_embeddings(self, value):
        self.model.embed_tokens = value

    def get_output_embeddings(self):
        return self.lm_head

    def set_output_embeddings(self, new_embeddings):
        self.lm_head = new_embeddings

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[KwargsForCausalLM],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, OlmoForCausalLM

        >>> model = OlmoForCausalLM.from_pretrained("meta-olmo/Olmo-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-olmo/Olmo-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )

        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["OlmoForCausalLM", "OlmoModel", "OlmoPreTrainedModel"]
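
# Minimal end-to-end usage sketch (illustrative only; the checkpoint id below is an
# assumption — substitute any OLMo checkpoint hosted on the Hugging Face Hub):
#
#     >>> from transformers import AutoTokenizer, OlmoForCausalLM
#     >>> tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-hf")
#     >>> model = OlmoForCausalLM.from_pretrained("allenai/OLMo-1B-hf")
#     >>> inputs = tokenizer("Rotary embeddings are", return_tensors="pt")
#     >>> output_ids = model.generate(**inputs, max_new_tokens=16)
#     >>> print(tokenizer.decode(output_ids[0], skip_special_tokens=True))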