"""Device function for replicated training."""

from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import device as pydev
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import server_lib
from tensorflow.python.util.tf_export import tf_export

# Stateful op types that are placed on `ps` devices by default.
STANDARD_PS_OPS = ("Variable", "VariableV2", "AutoReloadVariable",
                   "MutableHashTable", "MutableHashTableV2",
                   "MutableHashTableOfTensors", "MutableHashTableOfTensorsV2",
                   "MutableDenseHashTable", "MutableDenseHashTableV2",
                   "VarHandleOp", "BoostedTreesEnsembleResourceHandleOp",
                   "BoostedTreesQuantileStreamResourceHandleOp",
                   "ResourceConditionalAccumulator",
                   "DecisionTreeResource")


class _RoundRobinStrategy:
  """Returns the next ps task index for placement in round-robin order.

  This class is not to be used directly by users.  See instead
  `replica_device_setter()` below.
  """

  def __init__(self, num_tasks):
    """Create a new `_RoundRobinStrategy`.

    Args:
      num_tasks: Number of ps tasks to cycle among.
    """
    self._num_tasks = num_tasks
    self._next_task = 0

  def __call__(self, unused_op):
    """Choose a ps task index for the given `Operation`.

    Args:
      unused_op: An `Operation` to be placed on ps.

    Returns:
      The next ps task index to use for the `Operation`, cycling through the
      range `[0, num_tasks)` in round-robin order.
    """
    task = self._next_task
    self._next_task = (self._next_task + 1) % self._num_tasks
    return task


class _ReplicaDeviceChooser:
  """Class to choose devices for Ops in a replicated training setup.

  This class is not to be used directly by users.  See instead
  `replica_device_setter()` below.
  """

  def __init__(self, ps_tasks, ps_device, worker_device, merge_devices, ps_ops,
               ps_strategy):
    """Create a new `_ReplicaDeviceChooser`.

    Args:
      ps_tasks: Number of tasks in the `ps` job.
      ps_device: String.  Name of the `ps` job.
      worker_device: String.  Name of the `worker` job.
      merge_devices: Boolean.  Set to True to allow merging of device specs.
      ps_ops: List of strings representing `Operation` types that need to be
        placed on `ps` devices.
      ps_strategy: A callable invoked for every ps `Operation` (i.e. matched by
        `ps_ops`), that takes the `Operation` and returns the ps task index to
        use.
    """
    self._ps_tasks = ps_tasks
    self._ps_device = ps_device
    self._worker_device = worker_device
    self._merge_devices = merge_devices
    self._ps_ops = ps_ops
    self._ps_strategy = ps_strategy

  def device_function(self, op):
    """Choose a device for `op`.

    Args:
      op: an `Operation`.

    Returns:
      The device to use for the `Operation`.
    """
    # If merging is disabled and the op already carries a device, keep it.
    if not self._merge_devices and op.device:
      return op.device

    current_device = pydev.DeviceSpec.from_string(op.device or "")

    # `op` may be either an `Operation` or a bare `NodeDef`.
    node_def = op if isinstance(op, node_def_pb2.NodeDef) else op.node_def
    if self._ps_tasks and self._ps_device and node_def.op in self._ps_ops:
      ps_device = pydev.DeviceSpec.from_string(self._ps_device)

      # Only pick a ps task (via ps_strategy) when the job already requested by
      # the op, if any, does not conflict with the ps job.
      current_job, ps_job = current_device.job, ps_device.job
      if ps_job and (not current_job or current_job == ps_job):
        ps_device = ps_device.replace(task=self._ps_strategy(op))

      ps_device = ps_device.make_merged_spec(current_device)
      return ps_device.to_string()

    worker_device = pydev.DeviceSpec.from_string(self._worker_device or "")
    worker_device = worker_device.make_merged_spec(current_device)
    return worker_device.to_string()


@tf_export(v1=["train.replica_device_setter"])
def replica_device_setter(ps_tasks=0,
                          ps_device="/job:ps",
                          worker_device="/job:worker",
                          merge_devices=True,
                          cluster=None,
                          ps_ops=None,
                          ps_strategy=None):
  """
  Return a `device function` to use when building a Graph for replicas.

  Device functions are used in `with tf.device(device_function):` statements
  to automatically assign devices to `Operation` objects as they are
  constructed.  Device constraints are added from the innermost context first,
  working outwards.  The merging behavior adds constraints to fields that are
  yet unset by a more inner context.  Currently the fields are (job, task,
  cpu/gpu).

  If `cluster` is `None` and `ps_tasks` is 0, the returned function is a
  no-op.  Otherwise, the value of `ps_tasks` is derived from `cluster`.

  By default, only Variable ops are placed on ps tasks, and the placement
  strategy is round-robin over all ps tasks.  A custom `ps_strategy` may be
  used to do more intelligent placement, such as
  `tf.contrib.training.GreedyLoadBalancingStrategy`.

  For example,

  ```python
  # To build a cluster with two ps jobs on hosts ps0 and ps1, and 3 worker
  # jobs on hosts worker0, worker1 and worker2.
  cluster_spec = {
      "ps": ["ps0:2222", "ps1:2222"],
      "worker": ["worker0:2222", "worker1:2222", "worker2:2222"]}
  with tf.compat.v1.device(
      tf.compat.v1.train.replica_device_setter(cluster=cluster_spec)):
    # Build your graph
    v1 = tf.Variable(...)  # assigned to /job:ps/task:0
    v2 = tf.Variable(...)  # assigned to /job:ps/task:1
    v3 = tf.Variable(...)  # assigned to /job:ps/task:0
  # Run compute
  ```

  Args:
    ps_tasks: Number of tasks in the `ps` job.  Ignored if `cluster` is
      provided.
    ps_device: String.  Device of the `ps` job.  If empty, no `ps` job is
      used.  Defaults to `/job:ps`.
    worker_device: String.  Device of the `worker` job.  If empty, no `worker`
      job is used.  Defaults to `/job:worker`.
    merge_devices: `Boolean`.  If `True`, merges device specifications rather
      than overriding them: a field (job, task, cpu/gpu) is only set if it is
      still unset on the op.
    cluster: `ClusterDef` proto or `ClusterSpec`.
    ps_ops: List of strings representing `Operation` types that need to be
      placed on `ps` devices.  If `None`, defaults to `STANDARD_PS_OPS`.
    ps_strategy: A callable invoked for every ps `Operation` (i.e. matched by
      `ps_ops`), that takes the `Operation` and returns the ps task index to
      use.  If `None`, defaults to a round-robin strategy across all `ps`
      devices.

  Returns:
    A function to pass to `tf.device()`.

  Raises:
    TypeError: if `cluster` is not a dictionary or `ClusterDef` protocol
      buffer, or if `ps_strategy` is provided but not callable.
  """
  if cluster is not None:
    if isinstance(cluster, server_lib.ClusterSpec):
      cluster_spec = cluster.as_dict()
    else:
      cluster_spec = server_lib.ClusterSpec(cluster).as_dict()
    # Derive the ps job name from `ps_device` (e.g. "/job:ps" -> "ps").
    ps_job_name = pydev.DeviceSpec.from_string(ps_device).job
    if ps_job_name not in cluster_spec or cluster_spec[ps_job_name] is None:
      return None
    ps_tasks = len(cluster_spec[ps_job_name])

  if ps_tasks == 0:
    return None

  if ps_ops is None:
    ps_ops = list(STANDARD_PS_OPS)

  if not merge_devices:
    logging.warning(
        "DEPRECATION: It is recommended to set merge_devices=true in "
        "replica_device_setter")
  if ps_strategy is None:
    ps_strategy = _RoundRobinStrategy(ps_tasks)
  if not callable(ps_strategy):
    raise TypeError("ps_strategy must be callable")
  chooser = _ReplicaDeviceChooser(ps_tasks, ps_device, worker_device,
                                  merge_devices, ps_ops, ps_strategy)
  return chooser.device_function