# tensorflow/python/ops/cond_v2.py
"""cond_v2 and gradient.

This is a version of cond that emits a single If op, as well as the gradient
function for If ops produced by cond_v2. This will eventually replace the
current tf.cond implementation once it reaches feature and performance parity.
"""

import collections

from tensorflow.core.framework import types_pb2
from tensorflow.python.eager import backprop_util
from tensorflow.python.framework import auto_control_deps
from tensorflow.python.framework import auto_control_deps_utils
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import func_graph as func_graph_module
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor as tensor_lib
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import type_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import control_flow_util_v2 as util
from tensorflow.python.ops import default_gradient
from tensorflow.python.ops import gen_functional_ops
from tensorflow.python.ops import gen_optional_ops
from tensorflow.python.ops import gradients_util
from tensorflow.python.ops import handle_data_util
from tensorflow.python.ops import math_ops
from tensorflow.python.util import nest

_COND = 1
_CASE = 2


def cond_v2(pred, true_fn, false_fn, name="cond"):
  """Like tf.cond, except emits a single If op."""
  ...


@ops.RegisterGradient("StatelessIf")
@ops.RegisterGradient("If")
def _IfGrad(op, *grads):
  """The gradient of an If op produced by cond_v2."""
  ...


def _build_cond(pred,
                true_graph,
                false_graph,
                true_inputs,
                false_inputs,
                building_gradient,
                name=None):
  """Creates an If op from the specified predicate, branch functions and inputs.

  Note that this modifies true_graph and false_graph to make the inputs match,
  and to output all intermediate values so they're available for the gradient
  computation.

  true_graph and false_graph need not have the same input types, but they must
  have the same output types.

  Args:
    pred: boolean Tensor
    true_graph: FuncGraph
    false_graph: FuncGraph
    true_inputs: a list of Tensors to be passed to true_graph as input.
    false_inputs: a list of Tensors to be passed to false_graph as input.
    building_gradient: Whether this is a gradient If op.
    name: the name for the If op.

  Returns:
    A list of Tensors which are the outputs of the If op. Does not include
    added intermediate outputs.
  """
  ...


def get_func_graphs(op):
  """Returns `FuncGraph`s for the input op branches.

  Args:
    op: The If or Case Operation.

  Returns:
    A tuple of the `FuncGraph`s of the then_branch and else_branch (all
    branches for Case).
  """

  def _get_func_graph_for_branch(name_attr_list, cached_attr_name=None):
    """Generates and returns a FuncGraph for the given branch."""
    ...

  ...


def _get_compatible_structured_output_specs(true_graph, false_graph):
  """Returns the most specific compatible specs of graph structured outputs."""
  ...


def _get_compatible_spec(value_or_spec1, value_or_spec2):
  """Returns the most specific compatible spec.

  Args:
    value_or_spec1: A TypeSpec or a value that has a defined TypeSpec.
    value_or_spec2: A TypeSpec or a value that has a defined TypeSpec.

  Returns:
    The most specific compatible TypeSpec of the inputs.

  Raises:
    ValueError: If value_or_spec1 is not compatible with value_or_spec2.
  """
  ...


def _get_spec_for(value_or_spec):
  """Returns the TypeSpec of a value, or the value itself if already a spec."""
  ...


def _grad_fn(func_graph, grads):
  """The gradient function for each conditional branch.

  This function builds the gradient graph of the corresponding forward-pass
  conditional branch in `func_graph`. This is done by differentiating
  func_graph's outputs w.r.t. its inputs.

  Args:
    func_graph: FuncGraph. The corresponding forward-pass function.
    grads: The list of input gradient Tensors.

  Returns:
    The output gradient Tensors.
  """
  ...


def _create_grad_func(func_graph, grads, name):
  """Returns the FuncGraph representation of _grad_fn."""
  ...


def _resolve_grad_inputs(cond_graph, grad_graph):
  """Returns the tensors to pass as inputs to `grad_graph`.

  The `grad_graph` may have external references to:
  1. Its outer graph containing the input gradients. These references are kept
     as is.
  2. Tensors in the forward pass graph. These tensors may not be "live"
     when the gradient is being computed. We replace such references by their
     corresponding tensor in `cond_graph.outer_graph`. In the case of nested
     control flow or functions, the gradient logic handling
     `grad_graph.outer_graph` will make sure the tensor from
     `cond_graph.outer_graph` is also correctly captured.

  Args:
    cond_graph: FuncGraph. The forward-pass function.
    grad_graph: FuncGraph. The gradients function.

  Returns:
    A list of input tensors to be passed to grad_graph.
  """
  ...


def _get_intermediates(func_graph):
  """Returns intermediate tensors of `func_graph` for gradient computation."""
  ...


def _make_intermediates_match(branch_graphs, branch_optionals):
  """Returns new lists of optionals that have matching signatures.

  This is done by mirroring each list in the other using none optionals.
  There is no merging of like optionals.

  Args:
    branch_graphs: `list` of `FuncGraph`.
    branch_optionals: `list` of `list`s of optional `Tensor`s from other
      branch_graphs.

  Returns:
    A `list` of `list`s of `Tensor`s for each branch_graph. Each list has the
    same number of `Tensor`s, all of which will be optionals of the same
    shape/type.
  """
  ...


def _make_intermediates_match_xla(branch_graphs, branch_intermediates):
  """Like _make_intermediates_match but for the XLA case."""
  ...


def _make_inputs_match(branch_graphs, branch_inputs):
  """Modifies branch_graphs so they have the same input signature.

  This method reorders and/or adds parameters to each graph in branch_graphs
  so they have the same input signature, and updates the 'inputs' and
  'captured' fields of each graph accordingly. It uses the input tensors from
  the outer graph to avoid duplicating shared arguments.

  Args:
    branch_graphs: a `list` of `FuncGraph`
    branch_inputs: a `list` of `list`s of `Tensor`s in the outer graph. The
      inputs for the corresponding graph in `branch_graphs`.

  Returns:
    A new list of Tensors from the outer graph that are the new inputs for
    each branch_graph. This is a deduped version of `sum(branch_inputs)`.
  """
  ...


def _create_zeros_for_none_grads(forward_graphs, grad_graphs):
  """Creates zeros for None out grads if at least one branch has a non-None grad.

  Args:
    forward_graphs: List of forward FuncGraphs.
    grad_graphs: List of grad FuncGraphs.
  """
  ...


def _make_output_composite_tensors_match(op_type, branch_graphs):
  """Modifies each branch_graph's outputs to have the same output signature.

  Currently the only transformation implemented is turning a Tensor into an
  equivalent IndexedSlices if the other branch returns an IndexedSlices.
  Updates branch_graph.{outputs,structured_outputs} for each branch_graph in
  branch_graphs.

  Args:
    op_type: _COND or _CASE
    branch_graphs: `list` of `FuncGraph`

  Raises:
    TypeError: if a set of outputs cannot be rewritten.
  """
  ...


def _make_indexed_slices_indices_types_match(op_type, branch_graphs):
  """Matches the dtype of IndexedSlices.indices in outputs of branch_graphs."""
  ...


def _pack_sequence_as(structured_outputs, op_outputs):
  """Packs the outputs of the gradient If/Case op.

  The branch functions may contain Nones in the list of `structured_outputs`.
  `op_outputs` has those outputs missing. So we need to add those Nones to the
  list of `op_outputs` and then pack it in the same structure as
  `structured_outputs`.

  Args:
    structured_outputs: structured_outputs from one of the branch functions.
    op_outputs: List of output tensors of the op.

  Returns:
    `op_outputs` packed like `structured_outputs`.
  """
  ...


def _wrap_intermediates(func_graph, intermediates):
  ...


def _create_dummy_input(func_graph, template_tensor):
  """Creates a tensor in func_graph to represent template_tensor.

  Args:
    func_graph: FuncGraph.
    template_tensor: a tensor in the outer graph.

  Returns:
    A tensor in func_graph.
  """
  ...


def _create_none_optionals(func_graph, n):
  """Creates `n` `None` optionals in func_graph.

  Args:
    func_graph: FuncGraph.
    n: `int`, the number of `None` optionals to make.

  Returns:
    A list of tensors in func_graph.
  """
  ...


def _convert_dynamic_dimension_to_zero(shape):
  """Converts dynamic dimensions in `shape` to zero.

  The fake params created to match the intermediates captured in other
  branches could have dynamic dimensions. But the XLA shape is not able to
  handle dynamic dimensions in TF TensorShape. Setting the dynamic dimensions
  to size zero will help avoid failing safety checks in the bridge. When the
  XLA DynamicConditional op reconciles branch differences, XLA will replace
  the dimension size 0 with a bounded dimension determined from the shape of
  the real argument in the other branch.

  Note: Unknown-rank shapes are returned as they are.

  Args:
    shape: The TensorShape of the fake param.

  Returns:
    The new TensorShape with dynamic dimensions set to zero.
  """
  ...


def _create_fakeparams(func_graph, template_tensors):
  """Creates FakeParams for the XLA case."""
  ...


def _check_same_outputs(op_type, graphs):
  """Raises an error if `graphs` have different outputs."""
  ...


def _get_output_shapes(*branch_graph_outputs):
  ...


def _copy_handle_data(external_tensors, *branch_graph_outputs):
  """Combines shapes in handle data and sets metadata on `external_tensors`."""
  ...


def verify_captures(op_type, branch_graphs):
  """Verifies that a branch's tensor is not accessed in another branch fn."""
  ...


class _CondGradFuncGraph(util.CondBranchFuncGraph):
  """FuncGraph for the gradient function of the branch of an If op.

  Handles wrapping and unwrapping intermediate values that are captured by the
  gradient computation in optionals.

  Attributes:
    op_needs_rewrite: True if any intermediates were captured, meaning the
      forward If op needs to be rewritten to output the wrapped intermediates.
  """

  def __init__(self, name, forward_graph):
    ...

  @property
  def wrapped_intermediates(self):
    """The optional-wrapped intermediates captured from the forward graph."""
    ...

  @property
  def xla_intermediates(self):
    """Raw intermediates captured from the forward graph if XLA is enabled."""
    ...

  def _capture_helper(self, tensor, name):
    ...


def indexed_case(branch_index,
                 branch_fns,
                 name="indexed_case",
                 lower_using_switch_merge=None):
  """Like cond_v2, except emits a Case op instead of an If."""
  ...


@ops.RegisterGradient("Case")
@ops.RegisterGradient("StatelessCase")
def _CaseGrad(op, *grads):
  """The gradient of a Case op produced by tf.switch_case."""
  ...


def _build_case(branch_index,
                branch_graphs,
                branch_inputs,
                name=None,
                lower_using_switch_merge=None):
  """Creates a `Case` op from `branch_index`, branch graphs and inputs.

  Note that this modifies `branch_graphs` to make the inputs match, and to
  output all intermediate values so they're available for the gradient
  computation.

  `branch_graphs` need not have the same input types, but they must
  have the same output types.

  Args:
    branch_index: integer Tensor
    branch_graphs: List of FuncGraph
    branch_inputs: List of lists of Tensors to be passed to the corresponding
      branch_graph as input.
    name: the name for the Case op.
    lower_using_switch_merge: Lower this op using switch merge ops (optional).

  Returns:
    A list of Tensors which are the outputs of the Case op. Does not include
    added intermediate outputs.
  """
  ...


def _set_read_only_resource_inputs_attr(op, branch_graphs):
  """Sets the list of resource inputs which are read-only.

  This is used by AutomaticControlDependencies.

  Args:
    op: If or Case Operation.
    branch_graphs: List of branch FuncGraphs.
  """
  ...
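

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of this module's code). cond_v2 and
# indexed_case are the v2 control-flow implementations behind tf.cond and
# tf.switch_case inside graphs: each call emits a single If/Case op, and the
# gradient functions registered above (_IfGrad, _CaseGrad) handle
# differentiation. The snippet below uses only public TensorFlow 2.x APIs
# (tf.function, tf.cond, tf.switch_case, tf.GradientTape) and is a hedged
# sketch of how those entry points exercise this module.
#
#   import tensorflow as tf
#
#   @tf.function
#   def branchy(x):
#     # Inside a FuncGraph, tf.cond lowers to a single If op built by cond_v2.
#     return tf.cond(x > 0.0, lambda: tf.square(x), lambda: -x)
#
#   @tf.function
#   def pick(index, x):
#     # tf.switch_case lowers to a single Case op built by indexed_case.
#     return tf.switch_case(index,
#                           branch_fns=[lambda: x + 1.0, lambda: x * 2.0])
#
#   x = tf.constant(3.0)
#   with tf.GradientTape() as tape:
#     tape.watch(x)
#     y = branchy(x)
#   # Differentiating through the If op invokes the registered _IfGrad, which
#   # builds a gradient If op whose branches differentiate the forward
#   # branches.
#   dy_dx = tape.gradient(y, x)  # 2 * x = 6.0 for the taken (x > 0) branch
#
#   z = pick(tf.constant(1), x)  # selects the second branch: x * 2.0
# ---------------------------------------------------------------------------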