
"""Helper library for functions used during TPU compilation."""

import contextlib
import threading


class TpuContext(threading.local):
  """A context object holding state about the TPU computation being built."""

  def __init__(self):
    """Creates a new TpuContext."""
    self._number_of_shards = None

  @property
  def number_of_shards(self):
    return self._number_of_shards

  def set_number_of_shards(self, number_of_shards):
    self._number_of_shards = number_of_shards


# Holds the number of shards while a sharded computation is being built, or
# None when no computation is being built.
_current_tpu_context = TpuContext()


@contextlib.contextmanager
def tpu_shard_context(number_of_shards):
  """A context manager setting current number of shards."""
  if _current_tpu_context.number_of_shards is not None:
    raise NotImplementedError("tpu_shard_context cannot be nested")
  try:
    _current_tpu_context.set_number_of_shards(number_of_shards)
    yield
  finally:
    _current_tpu_context.set_number_of_shards(None)


def get_tpu_context():
  return _current_tpu_context


# Decorator for a TPU computation function: marks it as containing an embedded
# training loop so trace tools can generate per-iteration step markers.
def on_device_training_loop(func):
  setattr(func, "step_marker_location", "STEP_MARK_AT_TOP_LEVEL_WHILE_LOOP")
  return func
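

# Illustrative usage sketch (not part of the original module): the shard
# context is normally entered by the TPU compilation machinery, and code
# built inside it can query the current shard count via `get_tpu_context()`.
#
#   with tpu_shard_context(number_of_shards=8):
#     assert get_tpu_context().number_of_shards == 8
#   # Outside the context the shard count resets to None.
#   assert get_tpu_context().number_of_shards is None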