
    AVhF                    "a   d Z ddlZddlmZ ddlmZ ddlmZ ddlm	Z
 ddlmZ ddlmZ dd	lmZ dd
lmZ ddlmZ ddlmZ ddlmZ ddlmZ ddlmZm Z m!Z! ddl"m#Z#  edddddddddddddddd d!d"d#d$      Z$dDd%e#e!e$f   d&e#e!e$f   d'e#e!e$f   d(e#e!e$f   d)e#e!e$f   d*e#e!e$f   d+e#e!e$f   d,e#e!e$f   d-e#e!e$f   d.e%d/e#e!e$f   fd0Z&  ed1       ejN                  e&            Z(d%e#e!e$f   d&e#e!e$f   d'e#e!e$f   d(e#e!e$f   d)e#e!e$f   d*e#e!e$f   d+e#e!e$f   d,e#e!e$f   d-e#e!e$f   d.e%d/e#e!e$f   fd2Z) ed3ddddddddddddddd d!d"d#d$      Z*dDd%e#e!e*f   d4e#e!e*f   d5e#e!e*f   d)e#e!e*f   d6e#e!e*f   d,e#e!e*f   d-e#e!e*f   d.e%d/e#e!e*f   fd7Z+  ed8       ejN                  e+            Z,d%e#e!e*f   d4e#e!e*f   d5e#e!e*f   d)e#e!e*f   d6e#e!e*f   d,e#e!e*f   d-e#e!e*f   d.e%d/e#e!e*f   fd9Z- ed:ddddddddddddddd d!d"d#d$      Z.dEd%e#e!e.f   d4e#e!e.f   d)e#e!e.f   d-e#e!e.f   d.e%d;e%d/e#e!e.f   fd<Z/  ed=       ejN                  e/            Z0d%e#e!e.f   d4e#e!e.f   d)e#e!e.f   d-e#e!e.f   d.e%d;e%d/e#e!e.f   fd>Z1 ed?ddddddddddddddd d!d"d#d$      Z2dDd%e#e!e2f   d@e#e!e2f   dAe#e!e2f   d-e#e!e2f   d)e#e!e2f   dBe#e!e2f   dCe#e!e2f   dDe#e!ejf                  f   d.e%d/e#e!e2f   fdEZ4  edF       ejN                  e4            Z5d%e#e!e2f   d@e#e!e2f   dAe#e!e2f   d-e#e!e2f   d)e#e!e2f   dBe#e!e2f   dCe#e!e2f   dDe#e!ejf                  f   d.e%d/e#e!e2f   fdGZ6 edHddddddddddddddd d!d"d#d$      Z7dEd%e#e!e7f   d4e#e!e7f   d)e#e!e7f   d,e#e!e7f   d-e#e!e7f   d.e%d;e%d/e#e!e7f   fdIZ8  edJ       ejN                  e8            Z9d%e#e!e7f   d4e#e!e7f   d)e#e!e7f   d,e#e!e7f   d-e#e!e7f   d.e%d;e%d/e#e!e7f   fdKZ: edLddddddddddddddd d!d"d#d$      Z;dFd%e#e!e;f   d&e#e!e;f   d'e#e!e;f   d(e#e!e;f   dMe#e!e;f   d)e#e!e;f   d*e#e!e;f   d+e#e!e;f   d,e#e!e;f   d-e#e!e;f   d.e%dNe%d/e#e!e;f   fdOZ<  edP       ejN                  e<            Z=d%e#e!e;f   d&e#e!e;f   d'e#e!e;f   d(e#e!e;f   dMe#e!e;f   d)e#e!e;f   d*e#e!e;f   d+e#e!e;f   d,e#e!e;f   d-e#e!e;f   d.e%dNe%d/e#e!e;f   fdQZ> edRddddddddddddddd d!d"d#d$      Z?dDd%e#e!e?f   d&e#e!e?f   d)e#e!e?f   dSe#e!e?f   dTe#e!e?f   dUe#e!e?f   d-e#e!e?f   d.e%d/e#e!e?f   fdVZ@  edW       ejN                  e@            ZAd%e#e!e?f   d&e#e!e?f   d)e#e!e?f   dSe#e!e?f   dTe#e!e?f   dUe#e!e?f   d-e#e!e?f   d.e%d/e#e!e?f   fdXZB edYddddddddddddddd d!d"d#d$      ZCdDd%e#e!eCf   dZe#e!eCf   d[e#e!eCf   d\e#e!eCf   d)e#e!eCf   d6e#e!eCf   d]e#e!eCf   d,e#e!eCf   d-e#e!eCf   d.e%d/e#e!eCf   fd^ZD  ed_       ejN                  eD            ZEd%e#e!eCf   dZe#e!eCf   d[e#e!eCf   d\e#e!eCf   d)e#e!eCf   d6e#e!eCf   d]e#e!eCf   d,e#e!eCf   d-e#e!eCf   d.e%d/e#e!eCf   fd`ZF edaddddddddddddddd d!d"d#d$      ZGdFd%e#e!eGf   d4e#e!eGf   dbe#e!eGf   d-e#e!eGf   d)e#e!eGf   dBe#e!eGf   dCe#e!eGf   dce#e!eGf   d.e%dde%d/e#e!eGf   fdeZH  edf       ejN                  eH            ZId%e#e!eGf   d4e#e!eGf   dbe#e!eGf   d-e#e!eGf   d)e#e!eGf   dBe#e!eGf   dCe#e!eGf   dce#e!eGf   d.e%dde%d/e#e!eGf   fdgZJ edhddddddddddddddd d!d"d#d$      ZKdFd%e#e!eKf   d4e#e!eKf   dbe#e!eKf   d-e#e!eKf   d)e#e!eKf   dBe#e!eKf   dCe#e!eKf   die#e!eKf   dce#e!eKf   d.e%dde%d/e#e!eKf   fdjZL  edk       ejN                  eL            ZMd%e#e!eKf   d4e#e!eKf   dbe#e!eKf   d-e#e!eKf   d)e#e!eKf   dBe#e!eKf   dCe#e!eKf   die#e!eKf   dce#e!eKf   d.e%dde%d/e#e!eKf   fdlZN edmddddddddddddddd d!d"d#d$      ZOdDd%e#e!eOf   dSe#e!eOf   dne#e!eOf   d.e%d/e#e!eOf   f
doZP  edp       ejN                  eP            ZQd%e#e!eOf   dSe#e!eOf   dne#e!eOf   d.e%d/e#e!eOf   f
dqZR edrddddddddddddddd d!d"d#d$      ZSdFd%e#e!eSf   d4e#e!eSf   d)e#e!eSf   d-e#e!eSf   d]e#e!eSf   d.e%dNe%d/e#e!eSf   fdsZT  edt       ejN                  eT            ZUd%e#e!eSf   d4e#e!eSf   d)e#e!eSf   d-e#e!eSf   d]e#e!eSf   d.e%dNe%d/e#e!eSf   fduZV edvddddddddddddddd d!d"d#d$      ZWdDd%e#e!eWf   d&e#e!eWf   d)e#e!eWf   dwe#e!eWf   dTe#e!eWf   dUe#e!eWf   d-e#e!eWf   d.e%d/e#e!eWf   fdxZX  edy       ejN                  eX            ZYd%e#e!eWf   d&e#e!eWf   d)e#e!eWf   dwe#e!eWf   dTe#e!eWf   dUe#e!eWf   d-e#e!eWf   d.e%d/e#e!eWf   fdzZZ ed{ddddddddddddddd d!d"d#d$      Z[dDd%e#e!e[f   d4e#e!e[f   d)e#e!e[f   dBe#e!e[f   dCe#e!e[f   d-e#e!e[f   d.e%d/e#e!e[f   fd|Z\  ed}       ejN                  e\            Z]d%e#e!e[f   d4e#e!e[f   d)e#e!e[f   dBe#e!e[f   dCe#e!e[f   d-e#e!e[f   d.e%d/e#e!e[f   fd~Z^ edddddddddddddddd d!d"d#d$      Z_dDd%e#e!e_f   dSe#e!e_f   dBe#e!e_f   dCe#e!e_f   dne#e!e_f   d.e%d/e#e!e_f   fdZ`  ed       ejN                  e`            Zad%e#e!e_f   dSe#e!e_f   dBe#e!e_f   dCe#e!e_f   dne#e!e_f   d.e%d/e#e!e_f   fdZb edddddddddddddddd d!d"d#d$      ZcdDd%e#e!ecf   d[e#e!ecf   d\e#e!ecf   d)e#e!ecf   d6e#e!ecf   d]e#e!ecf   d,e#e!ecf   d-e#e!ecf   d.e%d/e#e!ecf   fdZd  ed       ejN                  ed            Zed%e#e!ecf   d[e#e!ecf   d\e#e!ecf   d)e#e!ecf   d6e#e!ecf   d]e#e!ecf   d,e#e!ecf   d-e#e!ecf   d.e%d/e#e!ecf   fdZf edddddddddddddddd d!d"d#d$      ZgdDd%e#e!ej                  f   d&e#e!ej                  f   d'e#e!ej                  f   d(e#e!egf   d)e#e!egf   d*e#e!egf   d+e#e!egf   d,e#e!egf   d-e#e!egf   d.e%fdZi  ed       ejN                  ei            Zjd%e#e!ej                  f   d&e#e!ej                  f   d'e#e!ej                  f   d(e#e!egf   d)e#e!egf   d*e#e!egf   d+e#e!egf   d,e#e!egf   d-e#e!egf   d.e%fdZk edddddddddddddddd d!d"d#d$      ZldDd%e#e!ej                  f   d4e#e!ej                  f   d5e#e!ej                  f   d)e#e!elf   d6e#e!elf   d,e#e!elf   d-e#e!elf   d.e%fdZm  ed       ejN                  em            Znd%e#e!ej                  f   d4e#e!ej                  f   d5e#e!ej                  f   d)e#e!elf   d6e#e!elf   d,e#e!elf   d-e#e!elf   d.e%fdZo edddddddddddddddd d!d"d#d$      ZpdEd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!epf   d-e#e!epf   d.e%d;e%fdZq  ed       ejN                  eq            Zrd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!epf   d-e#e!epf   d.e%d;e%fdZs edddddddddddddddd d!d"d#d$      ZtdDd%e#e!ej                  f   d@e#e!ej                  f   dAe#e!ej                  f   d-e#e!etf   d)e#e!etf   dBe#e!etf   dCe#e!etf   dDe#e!ejf                  f   d.e%fdZu  ed       ejN                  eu            Zvd%e#e!ej                  f   d@e#e!ej                  f   dAe#e!ej                  f   d-e#e!etf   d)e#e!etf   dBe#e!etf   dCe#e!etf   dDe#e!ejf                  f   d.e%fdZw edddddddddddddddd d!d"d#d$      ZxdEd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!exf   d,e#e!exf   d-e#e!exf   d.e%d;e%fdZy  ed       ejN                  ey            Zzd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!exf   d,e#e!exf   d-e#e!exf   d.e%d;e%fdZ{ edddddddddddddddd d!d"d#d$      Z|dFd%e#e!ej                  f   d&e#e!ej                  f   d'e#e!ej                  f   d(e#e!e|f   dMe#e!e|f   d)e#e!e|f   d*e#e!e|f   d+e#e!e|f   d,e#e!e|f   d-e#e!e|f   d.e%dNe%fdZ}  ed       ejN                  e}            Z~d%e#e!ej                  f   d&e#e!ej                  f   d'e#e!ej                  f   d(e#e!e|f   dMe#e!e|f   d)e#e!e|f   d*e#e!e|f   d+e#e!e|f   d,e#e!e|f   d-e#e!e|f   d.e%dNe%fdZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   d&e#e!ej                  f   d'e#e!ej                  f   de#e!ej                  f   d(e#e!ef   dMe#e!ef   d)e#e!ef   d*e#e!ef   d+e#e!ef   d,e#e!ef   d-e#e!ef   d.e%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d&e#e!ej                  f   d'e#e!ej                  f   de#e!ej                  f   d(e#e!ef   dMe#e!ef   d)e#e!ef   d*e#e!ef   d+e#e!ef   d,e#e!ef   d-e#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   d&e#e!ej                  f   d)e#e!ef   dSe#e!ef   dTe#e!ef   dUe#e!ef   d-e#e!ef   d.e%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d&e#e!ej                  f   d)e#e!ef   dSe#e!ef   dTe#e!ef   dUe#e!ef   d-e#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   dZe#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   d.e%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   dZe#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      ZdFd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dce#e!ef   d.e%dde%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dce#e!ef   d.e%dde%fdZ edddddddddddddddd d!d"d#d$      ZdFd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   die#e!ef   dce#e!ef   d.e%dde%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   die#e!ef   dce#e!ef   d.e%dde%fdZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   dSe#e!ef   dne#e!ef   d.e%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   dSe#e!ef   dne#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      ZdFd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   d]e#e!ef   d.e%dNe%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   d]e#e!ef   d.e%dNe%fdZ edddddddddddddddd d!d"d#d$      ZdFd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   d]e#e!ef   d.e%dNe%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   d]e#e!ef   d.e%dNe%fdZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   d&e#e!ej                  f   d)e#e!ef   dwe#e!ef   dTe#e!ef   dUe#e!ef   d-e#e!ef   d.e%fdZ  ed«       ejN                  e            Zd%e#e!ej                  f   d&e#e!ej                  f   d)e#e!ef   dwe#e!ef   dTe#e!ef   dUe#e!ef   d-e#e!ef   d.e%fdÄZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   d.e%fdńZ  edƫ       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   d.e%fdǄZ edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   dSe#e!ef   dBe#e!ef   dCe#e!ef   dne#e!ef   d.e%fdɄZ  edʫ       ejN                  e            Zd%e#e!ej                  f   dSe#e!ef   dBe#e!ef   dCe#e!ef   dne#e!ef   d.e%fd˄Z edddddddddddddddd d!d"d#d$      ZdDd%e#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   d.e%fd̈́Z  edΫ       ejN                  e            Zd%e#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   d.e%fdτZ edddddddddddddddd d!d"d#d$      Z eddd      ZdDd%e#e!ej                  f   d4e#e!ej                  f   d5e#e!ej                  f   d)e#e!ef   d6e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%fdӄZ  edԫ       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   d5e#e!ej                  f   d)e#e!ef   d6e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%fdՄZ edddddddddddddddd d!d"d#d$      Z eddd      ZdEd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%fd؄Z  ed٫       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%fdڄZ edddddddddddddddd d!d"d#d$      Z eddd      ZdDd%e#e!ej                  f   d@e#e!ej                  f   dAe#e!ej                  f   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dDe#e!ejf                  f   d.e%fd݄Z  edޫ       ejN                  e            Zd%e#e!ej                  f   d@e#e!ej                  f   dAe#e!ej                  f   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dDe#e!ejf                  f   d.e%fd߄Z edddddddddddddddd d!d"d#d$      Z eddd      ZdEd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZŐdDd%e#e!ej                  f   dZe#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%fdZ  ed       ejN                  eƫ            Zd%e#e!ej                  f   dZe#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZʐdFd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dce#e!ef   d.e%dde%fdZ  ed       ejN                  e˫            Zd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dce#e!ef   d.e%dde%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZϐdFd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   die#e!ef   dce#e!ef   d.e%dde%fdZ  ed       ejN                  eЫ            Zd%e#e!ej                  f   d4e#e!ej                  f   dbe#e!ej                  f   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   die#e!ef   dce#e!ef   d.e%dde%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZԐdFd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   de#e!ef   d]e#e!ef   d.e%dNe%fdZ  ed       ejN                  eի            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   de#e!ef   d]e#e!ef   d.e%dNe%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZِdFd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   de#e!ef   d]e#e!ef   d.e%dNe%fdZ  ed       ejN                  eګ            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   d-e#e!ef   de#e!ef   d]e#e!ef   d.e%dNe%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZސdDd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%fd Z  ed       ejN                  e߫            Zd%e#e!ej                  f   d4e#e!ej                  f   d)e#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZdDd%e#e!ej                  f   dSe#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%fdZ  ed       ejN                  e            Zd%e#e!ej                  f   dSe#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      Z ed	dd      ZdDd%e#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%fd
Z  ed       ejN                  e            Zd%e#e!ej                  f   d[e#e!ej                  f   d\e#e!ej                  f   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZdDd%e#e!ef   d4e#e!ef   d5e#e!ef   d)e#e!ef   d6e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fdZ  ed       ejN                  e            Zd%e#e!ef   d4e#e!ef   d5e#e!ef   d)e#e!ef   d6e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZdEd%e#e!ef   d4e#e!ef   d)e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%d/e#e!ef   fdZ  ed       ejN                  e            Zd%e#e!ef   d4e#e!ef   d)e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%d/e#e!ef   fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZdDd%e#e!ef   d@e#e!ef   dAe#e!ef   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dDe#e!ejf                  f   d.e%d/e#e!ef   fdZ  ed       ejN                  e            Zd%e#e!ef   d@e#e!ef   dAe#e!ef   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dDe#e!ejf                  f   d.e%d/e#e!ef   fdZ edddddddddddddddd d!d"d#d$      Z eddd      ZdEd%e#e!ef   d4e#e!ef   d)e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%d/e#e!ef   fdZ  ed       ejN                  e            Zd%e#e!ef   d4e#e!ef   d)e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d;e%d/e#e!ef   fd Z ed!ddddddddddddddd d!d"d#d$      Z  ed"dd      ZdDd%e#e!e f   dZe#e!e f   d[e#e!e f   d\e#e!e f   d)e#e!e f   d6e#e!e f   d]e#e!e f   d,e#e!e f   d-e#e!e f   de#e!ef   d.e%d/e#e!e f   fd#Z  ed$       ejN                  e            Zd%e#e!e f   dZe#e!e f   d[e#e!e f   d\e#e!e f   d)e#e!e f   d6e#e!e f   d]e#e!e f   d,e#e!e f   d-e#e!e f   de#e!ef   d.e%d/e#e!e f   fd%Z ed&ddddddddddddddd d!d"d#d$      Z ed'dd      ZdFd%e#e!ef   d4e#e!ef   dbe#e!ef   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dce#e!ef   d.e%dde%d/e#e!ef   fd(Z  ed)       ejN                  e            Zd%e#e!ef   d4e#e!ef   dbe#e!ef   d-e#e!ef   de#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   dce#e!ef   d.e%dde%d/e#e!ef   fd*Z	 ed+ddddddddddddddd d!d"d#d$      Z
 ed,dd      ZdFd%e#e!e
f   d4e#e!e
f   dbe#e!e
f   d-e#e!e
f   de#e!ef   d)e#e!e
f   dBe#e!e
f   dCe#e!e
f   die#e!e
f   dce#e!e
f   d.e%dde%d/e#e!e
f   fd-Z  ed.       ejN                  e            Zd%e#e!e
f   d4e#e!e
f   dbe#e!e
f   d-e#e!e
f   de#e!ef   d)e#e!e
f   dBe#e!e
f   dCe#e!e
f   die#e!e
f   dce#e!e
f   d.e%dde%d/e#e!e
f   fd/Z ed0ddddddddddddddd d!d"d#d$      Z ed1dd      ZdFd%e#e!ef   d4e#e!ef   d)e#e!ef   d-e#e!ef   de#e!ef   d]e#e!ef   d.e%dNe%d/e#e!ef   fd2Z  ed3       ejN                  e            Zd%e#e!ef   d4e#e!ef   d)e#e!ef   d-e#e!ef   de#e!ef   d]e#e!ef   d.e%dNe%d/e#e!ef   fd4Z ed5ddddddddddddddd d!d"d#d$      Z ed6dd      ZdDd%e#e!ef   d4e#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fd7Z  ed8       ejN                  e            Zd%e#e!ef   d4e#e!ef   d)e#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fd9Z ed:ddddddddddddddd d!d"d#d$      Z ed;dd      ZdDd%e#e!ef   dSe#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fd<Z  ed=       ejN                  e            Zd%e#e!ef   dSe#e!ef   dBe#e!ef   dCe#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fd>Z ed?ddddddddddddddd d!d"d#d$      Z ed@dd      ZdDd%e#e!ef   d[e#e!ef   d\e#e!ef   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fdAZ   edB       ejN                  e             Z!d%e#e!ef   d[e#e!ef   d\e#e!ef   d)e#e!ef   d6e#e!ef   d]e#e!ef   d,e#e!ef   d-e#e!ef   de#e!ef   d.e%d/e#e!ef   fdCZ"y(G  zUPython wrappers around TensorFlow ops.

This file is MACHINE GENERATED! Do not edit.
    N)
pywrap_tfe)context)core)execute)dtypes)annotation_types)op_def_registry)ops)op_def_library)deprecated_endpoints)dispatch)	tf_export)TypeVarListAny)	AnnotatedTV_ApplyAdaMax_Tz_atypes.BFloat16z_atypes.Complex128z_atypes.Complex64z_atypes.Float32z_atypes.Float64z_atypes.Halfz_atypes.Int16z_atypes.Int32z_atypes.Int64z_atypes.Int8z_atypes.QInt16z_atypes.QInt32z_atypes.QInt8z_atypes.QUInt16z_atypes.QUInt8z_atypes.UInt16z_atypes.UInt32z_atypes.UInt64z_atypes.UInt8varmvbeta1_powerlrbeta1beta2epsilongraduse_lockingreturnc                    t         j                   xs t        j                         }|j                  }|j                  rt	        d      |	d}	t        j                  |	d      }	t        j                  d| |||||||||	|
      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a/  Update '*var' according to the AdaMax algorithm.

  m_t <- beta1 * m_{t-1} + (1 - beta1) * g
  v_t <- max(beta2 * v_{t-1}, abs(g))
  variable <- variable - learning_rate / (1 - beta1^t) * m_t / (v_t + epsilon)

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    m: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    v: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    beta1_power: A `Tensor`. Must have the same type as `var`.
      Must be a scalar.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    beta1: A `Tensor`. Must have the same type as `var`.
      Momentum factor. Must be a scalar.
    beta2: A `Tensor`. Must have the same type as `var`.
      Momentum factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, m, and v tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Fapply_ada_max op does not support eager execution. Arg 'out' is a ref.NFr   ApplyAdaMaxr   r   r   r   r   r   r   r   r   r   nameT_contextr   _thread_local_datais_eagerRuntimeError_execute	make_bool_op_def_library_apply_op_helpermust_record_gradient_get_attr_type_get_attr_boolinputsrecord_gradient)r   r   r   r   r   r   r   r   r   r   r#   _ctxtld__op_outputs_result_attrs_inputs_flats                      V/home/dcms/DCMS/lib/python3.12/site-packages/tensorflow/python/ops/gen_training_ops.pyapply_ada_maxr<      s    D 
			0h..0$#\\
_
``K"";>+'883!qkb"%t#.T;!QX QK'""$3%%c*M  /1F::L|VW6('	.    zraw_ops.ApplyAdaMaxc                     t        d      )Nr    r)   )r   r   r   r   r   r   r   r   r   r   r#   ctxs               r;   apply_ada_max_eager_fallbackrA   V       ]^^r=   TV_ApplyAdadelta_Taccumaccum_updaterhoc	                    t         j                   xs t        j                         }	|	j                  }
|
j                  rt	        d      |d}t        j                  |d      }t        j                  d| ||||||||
      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a  Update '*var' according to the adadelta scheme.

  accum = rho() * accum + (1 - rho()) * grad.square();
  update = (update_accum + epsilon).sqrt() * (accum + epsilon()).rsqrt() * grad;
  update_accum = rho() * update_accum + (1 - rho()) * update.square();
  var -= update;

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    accum_update: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `var`.
      Decay factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var, accum and update_accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Gapply_adadelta op does not support eager execution. Arg 'out' is a ref.NFr   ApplyAdadelta	r   rD   rE   r   rF   r   r   r   r#   r$   r%   )r   rD   rE   r   rF   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                    r;   apply_adadeltarK   [   s    < 
			0h..0$#\\
`
aaK"";>+'88SLCt%0t=!QX QK'""$3%%c*M  /1F::Lvw8('	.r=   zraw_ops.ApplyAdadeltac
                     t        d      )NrH   r?   )
r   rD   rE   r   rF   r   r   r   r#   r@   s
             r;   apply_adadelta_eager_fallbackrM          ^__r=   TV_ApplyAdagrad_Tupdate_slotsc           
      0   t         j                   xs t        j                         }|j                  }|j                  rt	        d      |d}t        j                  |d      }|d}t        j                  |d      }t        j                  d| ||||||      \  }	}	}
}|dd }t        j                         rYd	|
j                  d	      d|
j                  d      d|
j                  d      f}|
j                  }t        j                  d|||       |\  }|S )
a/  Update '*var' according to the adagrad scheme.

  accum += grad * grad
  var -= lr * grad * (1 / sqrt(accum))

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Fapply_adagrad op does not support eager execution. Arg 'out' is a ref.NFr   TrP   ApplyAdagradr   rD   r   r   r   rP   r#   r$   r%   )r   rD   r   r   r   rP   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                  r;   apply_adagradrU      s   0 
			0h..0$#\\
_
``K"";>+L##L.A,'88Cu$$/l!#!QX QK'""$3%%c*M  /  02F ::Lfg7('	.r=   zraw_ops.ApplyAdagradc                     t        d      )NrR   r?   )r   rD   r   r   r   rP   r#   r@   s           r;   apply_adagrad_eager_fallbackrW      rB   r=   TV_ApplyAdagradDA_Tgradient_accumulatorgradient_squared_accumulatorl1l2global_stepc
                    t         j                   xs t        j                         }
|
j                  }|j                  rt	        d      |d}t        j                  |d      }t        j                  d| |||||||||	      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a^  Update '*var' according to the proximal adagrad scheme.

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    gradient_accumulator: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    gradient_squared_accumulator: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    global_step: A `Tensor` of type `int64`.
      Training step number. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Iapply_adagrad_da op does not support eager execution. Arg 'out' is a ref.NFr   ApplyAdagradDA
r   rY   rZ   r   r   r[   r\   r]   r   r#   r$   r%   )r   rY   rZ   r   r   r[   r\   r]   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                     r;   apply_adagrad_darb      s    6 
			0h..0$#\\
b
ccK"";>+'88c8L7S#rb&1{#%!QX QK'""$3%%c*M  /1F::L,9('	.r=   zraw_ops.ApplyAdagradDAc                     t        d      )Nr_   r?   )r   rY   rZ   r   r   r[   r\   r]   r   r#   r@   s              r;   apply_adagrad_da_eager_fallbackrd         `aar=   TV_ApplyAdagradV2_Tc                 2   t         j                   xs t        j                         }|j                  }	|	j                  rt	        d      |d}t        j                  |d      }|d}t        j                  |d      }t        j                  d| |||||||	      \  }
}
}}|dd }t        j                         rYd	|j                  d	      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )
a  Update '*var' according to the adagrad scheme.

  accum += grad * grad
  var -= lr * grad * (1 / sqrt(accum))

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Iapply_adagrad_v2 op does not support eager execution. Arg 'out' is a ref.NFr   TrP   ApplyAdagradV2r   rD   r   r   r   r   rP   r#   r$   r%   )r   rD   r   r   r   r   rP   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                   r;   apply_adagrad_v2rk     s    4 
			0h..0$#\\
b
ccK"";>+L##L.A,'88c2w#'3$@!QX QK'""$3%%c*M  /  02F ::L,9('	.r=   zraw_ops.ApplyAdagradV2c	                     t        d      )Nrh   r?   )	r   rD   r   r   r   r   rP   r#   r@   s	            r;   apply_adagrad_v2_eager_fallbackrm   C  re   r=   TV_ApplyAdam_Tbeta2_poweruse_nesterovc                 <   t         j                   xs t        j                         }|j                  }|j                  rt	        d      |
d}
t        j                  |
d      }
|d}t        j                  |d      }t        j                  d| |||||||||	|
||      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a
  Update '*var' according to the Adam algorithm.

  $$\text{lr}_t := \mathrm{lr} \cdot \frac{\sqrt{1 - \beta_2^t}}{1 - \beta_1^t}$$
  $$m_t := \beta_1 \cdot m_{t-1} + (1 - \beta_1) \cdot g$$
  $$v_t := \beta_2 \cdot v_{t-1} + (1 - \beta_2) \cdot g^2$$
  $$\text{var} := \begin{cases} \text{var} - (m_t \beta_1 + g \cdot (1 - \beta_1))\cdot\text{lr}_t/(\sqrt{v_t} + \epsilon), &\text{if use_nesterov}\\\\  \text{var} - m_t \cdot \text{lr}_t /(\sqrt{v_t} + \epsilon), &\text{otherwise} \end{cases}$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    m: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    v: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    beta1_power: A `Tensor`. Must have the same type as `var`.
      Must be a scalar.
    beta2_power: A `Tensor`. Must have the same type as `var`.
      Must be a scalar.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    beta1: A `Tensor`. Must have the same type as `var`.
      Momentum factor. Must be a scalar.
    beta2: A `Tensor`. Must have the same type as `var`.
      Momentum factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, m, and v tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, uses the nesterov update.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Capply_adam op does not support eager execution. Arg 'out' is a ref.NFr   rp   	ApplyAdamr   r   r   r   ro   r   r   r   r   r   r   rp   r#   r$   r%   )r   r   r   r   ro   r   r   r   r   r   r   rp   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                        r;   
apply_adamru   H  s+   N 
			0h..0$#\\
\
]]K"";>+L##L.A,'88QK!,5$4[".T	;!QX
 QK'""$3%%c*M  /  02F ::L\674('	.r=   zraw_ops.ApplyAdamc                     t        d      )Nrr   r?   )r   r   r   r   ro   r   r   r   r   r   r   rp   r#   r@   s                 r;   apply_adam_eager_fallbackrw         Z[[r=   TV_ApplyAddSign_Talpha
sign_decaybetac	                    t         j                   xs t        j                         }	|	j                  }
|
j                  rt	        d      |d}t        j                  |d      }t        j                  d| ||||||||
      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a  Update '*var' according to the AddSign update.

  m_t <- beta1 * m_{t-1} + (1 - beta1) * g
  update <- (alpha + sign_decay * sign(g) *sign(m)) * g
  variable <- variable - lr_t * update

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    m: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    alpha: A `Tensor`. Must have the same type as `var`. Must be a scalar.
    sign_decay: A `Tensor`. Must have the same type as `var`.
      Must be a scalar.
    beta: A `Tensor`. Must have the same type as `var`. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and m tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Gapply_add_sign op does not support eager execution. Arg 'out' is a ref.NFr   ApplyAddSign	r   r   r   rz   r{   r|   r   r   r#   r$   r%   )r   r   r   rz   r{   r|   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                    r;   apply_add_signr     s    8 
			0h..0$#\\
`
aaK"";>+'88C15#-Dt$/d<!QX QK'""$3%%c*M  /1F::Lfg7('	.r=   zraw_ops.ApplyAddSignc
                     t        d      )Nr~   r?   )
r   r   r   rz   r{   r|   r   r   r#   r@   s
             r;   apply_add_sign_eager_fallbackr     rN   r=   TV_ApplyCenteredRMSProp_Tmgmsmommomentumc                    t         j                   xs t        j                         }|j                  }|j                  rt	        d      |	d}	t        j                  |	d      }	t        j                  d| |||||||||	|
      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a/	  Update '*var' according to the centered RMSProp algorithm.

  The centered RMSProp algorithm uses an estimate of the centered second moment
  (i.e., the variance) for normalization, as opposed to regular RMSProp, which
  uses the (uncentered) second moment. This often helps with training, but is
  slightly more expensive in terms of computation and memory.

  Note that in dense implementation of this algorithm, mg, ms, and mom will
  update even if the grad is zero, but in this sparse implementation, mg, ms,
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  mean_grad = decay * mean_grad + (1-decay) * gradient

  Delta = learning_rate * gradient / sqrt(mean_square + epsilon - mean_grad ** 2)

  mg <- rho * mg_{t-1} + (1-rho) * grad
  ms <- rho * ms_{t-1} + (1-rho) * grad * grad
  mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms - mg * mg + epsilon)
  var <- var - mom

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    mg: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    ms: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    mom: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `var`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `var`.
      Momentum Scale. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, mg, ms, and mom tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Papply_centered_rms_prop op does not support eager execution. Arg 'out' is a ref.NFr   ApplyCenteredRMSPropr   r   r   r   r   rF   r   r   r   r   r#   r$   r%   )r   r   r   r   r   rF   r   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                      r;   apply_centered_rms_propr     s    b 
			0h..0$#\\
i
jjK"";>+'88CB232$'(G%){O!QX QK'""$3%%c*M  /1F::Lfg?('	.r=   zraw_ops.ApplyCenteredRMSPropc                     t        d      )Nr   r?   )r   r   r   r   r   rF   r   r   r   r   r#   r@   s               r;   &apply_centered_rms_prop_eager_fallbackr         ghhr=   TV_ApplyFtrl_Tlinearlr_powermultiply_linear_by_lrc                 8   t         j                   xs t        j                         }|j                  }|j                  rt	        d      |d}t        j                  |d      }|	d}	t        j                  |	d      }	t        j                  d| |||||||||	|
      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a  Update '*var' according to the Ftrl-proximal scheme.

  accum_new = accum + grad * grad
  linear += grad - (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    linear: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    lr_power: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Capply_ftrl op does not support eager execution. Arg 'out' is a ref.NFr   r   	ApplyFtrlr   rD   r   r   r   r[   r\   r   r   r   r#   r$   r%   )r   rD   r   r   r   r[   r\   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                      r;   
apply_ftrlr     s-   F 
			0h..0$#\\
\
]]K"";>+"!",,-BD[\'88E&trH++@tM!QX QK'""$3%%c*M  /1H  !89;F ::L\674('	.r=   zraw_ops.ApplyFtrlc                     t        d      )Nr   r?   )r   rD   r   r   r   r[   r\   r   r   r   r#   r@   s               r;   apply_ftrl_eager_fallbackr   [  rx   r=   TV_ApplyFtrlV2_Tl2_shrinkagec                 :   t         j                   xs t        j                         }|j                  }|j                  rt	        d      |	d}	t        j                  |	d      }	|
d}
t        j                  |
d      }
t        j                  d| |||||||||	|
|      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a%  Update '*var' according to the Ftrl-proximal scheme.

  grad_with_shrinkage = grad + 2 * l2_shrinkage * var
  accum_new = accum + grad * grad
  linear += grad_with_shrinkage -
      (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    linear: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 shrinkage regularization. Must be a scalar.
    l2_shrinkage: A `Tensor`. Must have the same type as `var`.
    lr_power: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Fapply_ftrl_v2 op does not support eager execution. Arg 'out' is a ref.NFr   r   ApplyFtrlV2r   rD   r   r   r   r[   r\   r   r   r   r   r#   r$   r%   )r   rD   r   r   r   r[   r\   r   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                       r;   apply_ftrl_v2r   `  s0   L 
			0h..0$#\\
_
``K"";>+"!",,-BD[\'883eF", (k-B	O!QX
 QK'""$3%%c*M  /1H  !89;F ::L|VW6('	.r=   zraw_ops.ApplyFtrlV2c                     t        d      )Nr   r?   )r   rD   r   r   r   r[   r\   r   r   r   r   r#   r@   s                r;   apply_ftrl_v2_eager_fallbackr     rB   r=   TV_ApplyGradientDescent_Tdeltac                    t         j                   xs t        j                         }|j                  }|j                  rt	        d      |d}t        j                  |d      }t        j                  d| ||||      \  }}}}	|	dd }
t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||
       |
\  }
|
S )aA  Update '*var' by subtracting 'alpha' * 'delta' from it.

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    alpha: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    delta: A `Tensor`. Must have the same type as `var`. The change.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, the subtraction will be protected by a lock;
      otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Oapply_gradient_descent op does not support eager execution. Arg 'out' is a ref.NFr   ApplyGradientDescentr   rz   r   r   r#   r$   r%   )r   rz   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                r;   apply_gradient_descentr     s    " 
			0h..0$#\\
h
iiK"";>+'88CuE,7dD!QX QK'""$3%%c*M  /1F::Lfg?('	.r=   zraw_ops.ApplyGradientDescentc                     t        d      )Nr   r?   )r   rz   r   r   r#   r@   s         r;   %apply_gradient_descent_eager_fallbackr         fggr=   TV_ApplyMomentum_Tc                 2   t         j                   xs t        j                         }|j                  }	|	j                  rt	        d      |d}t        j                  |d      }|d}t        j                  |d      }t        j                  d| |||||||	      \  }
}
}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	at  Update '*var' according to the momentum scheme.

  Set use_nesterov = True if you want to use Nesterov momentum.

  accum = accum * momentum + grad
  var -= lr * accum

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    momentum: A `Tensor`. Must have the same type as `var`.
      Momentum. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, the tensor passed to compute grad will be
      var - lr * momentum * accum, so in the end, the var you get is actually
      var - lr * momentum * accum.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Gapply_momentum op does not support eager execution. Arg 'out' is a ref.NFr   rp   ApplyMomentumr   rD   r   r   r   r   rp   r#   r$   r%   )r   rD   r   r   r   r   rp   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                   r;   apply_momentumr     s   > 
			0h..0$#\\
`
aaK"";>+L##L.A,'88S"4"*&2?!QX QK'""$3%%c*M  /  02F ::Lvw8('	.r=   zraw_ops.ApplyMomentumc	                     t        d      )Nr   r?   )	r   rD   r   r   r   r   rp   r#   r@   s	            r;   apply_momentum_eager_fallbackr     rN   r=   TV_ApplyPowerSign_Tlogbasec	                    t         j                   xs t        j                         }	|	j                  }
|
j                  rt	        d      |d}t        j                  |d      }t        j                  d| ||||||||
      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a)  Update '*var' according to the AddSign update.

  m_t <- beta1 * m_{t-1} + (1 - beta1) * g
  update <- exp(logbase * sign_decay * sign(g) * sign(m_t)) * g
  variable <- variable - lr_t * update

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    m: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    logbase: A `Tensor`. Must have the same type as `var`. Must be a scalar.
    sign_decay: A `Tensor`. Must have the same type as `var`.
      Must be a scalar.
    beta: A `Tensor`. Must have the same type as `var`. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and m tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Iapply_power_sign op does not support eager execution. Arg 'out' is a ref.NFr   ApplyPowerSign	r   r   r   r   r{   r|   r   r   r#   r$   r%   )r   r   r   r   r{   r|   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                    r;   apply_power_signr     s    8 
			0h..0$#\\
b
ccK"";>+'88cQ2w%/d&1>!QX QK'""$3%%c*M  /1F::L,9('	.r=   zraw_ops.ApplyPowerSignc
                     t        d      )Nr   r?   )
r   r   r   r   r{   r|   r   r   r#   r@   s
             r;   apply_power_sign_eager_fallbackr   M  re   r=   TV_ApplyProximalAdagrad_Tc                    t         j                   xs t        j                         }|j                  }	|	j                  rt	        d      |d}t        j                  |d      }t        j                  d| |||||||	      \  }
}
}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a	  Update '*var' and '*accum' according to FOBOS with Adagrad learning rate.

  accum += grad * grad
  prox_v = var - lr * grad * (1 / sqrt(accum))
  var = sign(prox_v)/(1+lr*l2) * max{|prox_v|-lr*l1,0}

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Oapply_proximal_adagrad op does not support eager execution. Arg 'out' is a ref.NFr   ApplyProximalAdagradr   rD   r   r[   r\   r   r   r#   r$   r%   )r   rD   r   r[   r\   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                   r;   apply_proximal_adagradr   R  s    6 
			0h..0$#\\
h
iiK"";>+'88Cur%){O!QX QK'""$3%%c*M  /1F::Lfg?('	.r=   zraw_ops.ApplyProximalAdagradc	                     t        d      )Nr   r?   )	r   rD   r   r[   r\   r   r   r#   r@   s	            r;   %apply_proximal_adagrad_eager_fallbackr     r   r=   !TV_ApplyProximalGradientDescent_Tc           
         t         j                   xs t        j                         }|j                  }|j                  rt	        d      |d}t        j                  |d      }t        j                  d| ||||||      \  }	}	}
}|dd }t        j                         rHd|
j                  d      d|
j                  d      f}|
j                  }t        j                  d|||       |\  }|S )aa  Update '*var' as FOBOS algorithm with fixed learning rate.

  prox_v = var - alpha * delta
  var = sign(prox_v)/(1+alpha*l2) * max{|prox_v|-alpha*l1,0}

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    alpha: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    delta: A `Tensor`. Must have the same type as `var`. The change.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, the subtraction will be protected by a lock;
      otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Xapply_proximal_gradient_descent op does not support eager execution. Arg 'out' is a ref.NFr   ApplyProximalGradientDescentr   rz   r[   r\   r   r   r#   r$   r%   )r   rz   r[   r\   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                  r;   apply_proximal_gradient_descentr     s    0 
			0h..0$#\\
q
rrK"";>+'88&Cu.3-13!QX QK'""$3%%c*M  /1F::L&fgG('	.r=   z$raw_ops.ApplyProximalGradientDescentc                     t        d      )Nr   r?   )r   rz   r[   r\   r   r   r#   r@   s           r;   .apply_proximal_gradient_descent_eager_fallbackr     s    oppr=   TV_ApplyRMSProp_Tc
                    t         j                   xs t        j                         }
|
j                  }|j                  rt	        d      |d}t        j                  |d      }t        j                  d| |||||||||	      \  }}}}|dd }t        j                         rHd|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a  Update '*var' according to the RMSProp algorithm.

  Note that in dense implementation of this algorithm, ms and mom will
  update even if the grad is zero, but in this sparse implementation, ms
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  Delta = learning_rate * gradient / sqrt(mean_square + epsilon)

  ms <- rho * ms_{t-1} + (1-rho) * grad * grad
  mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)
  var <- var - mom

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    ms: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    mom: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `var`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `var`.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, ms, and mom tensors is protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Gapply_rms_prop op does not support eager execution. Arg 'out' is a ref.NFr   ApplyRMSProp
r   r   r   r   rF   r   r   r   r   r#   r$   r%   )r   r   r   r   rF   r   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                     r;   apply_rms_propr     s    L 
			0h..0$#\\
`
aaK"";>+'88CBCBC!)7$/d<!QX QK'""$3%%c*M  /1F::Lfg7('	.r=   zraw_ops.ApplyRMSPropc                     t        d      )Nr   r?   )r   r   r   r   rF   r   r   r   r   r#   r@   s              r;   apply_rms_prop_eager_fallbackr     rN   r=   TV_ResourceApplyAdaMax_Tc                 B   t         j                   xs t        j                         }|j                  }|j                  r%	 t	        j
                  |d|
| ||||||||d|	      }|S |	d}	t        j                  |	d      }	t        j                   d| |||||||||	|
      \  }}}}|S # t        j                  $ r }t        j                  ||
       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|      S # t        j                  $ r Y w xY w)a  Update '*var' according to the AdaMax algorithm.

  m_t <- beta1 * m_{t-1} + (1 - beta1) * g
  v_t <- max(beta2 * v_{t-1}, abs(g))
  variable <- variable - learning_rate / (1 - beta1^t) * m_t / (v_t + epsilon)

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    m: A `Tensor` of type `resource`. Should be from a Variable().
    v: A `Tensor` of type `resource`. Should be from a Variable().
    beta1_power: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Must be a scalar.
    lr: A `Tensor`. Must have the same type as `beta1_power`.
      Scaling factor. Must be a scalar.
    beta1: A `Tensor`. Must have the same type as `beta1_power`.
      Momentum factor. Must be a scalar.
    beta2: A `Tensor`. Must have the same type as `beta1_power`.
      Momentum factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `beta1_power`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `beta1_power`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, m, and v tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdaMaxr   Nr   r#   r@   Fr"   )r&   r   r'   r(   r   TFE_Py_FastPathExecute_core_NotOkStatusException_opsraise_from_not_ok_status_FallbackException%resource_apply_ada_max_eager_fallback_SymbolicExceptionr*   r+   r,   r-   )r   r   r   r   r   r   r   r   r   r   r#   r3   r4   r8   er5   r6   r7   s                     r;   resource_apply_ada_maxr     sH   > 
			0h..0$#\\11#T31k2uwm[:g n K"";>+'883!qk"$E'.T+6T	C!QX
 
*' && -
##At,,## 
2
q!["eUGT!$8 8 ## 
0    #B& &C-9CC-,C-1D DDzraw_ops.ResourceApplyAdaMaxc                    |	d}	t        j                  |	d      }	t        j                  ||||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| ||||||||g	}d|d|	f}t        j4                  dd||||
      }d }|S )NFr   r$   s   ResourceApplyAdaMaxr   r1   attrsr@   r#   r*   r+   args_to_matching_eager_dtypesfloat32float64int32uint8int16int8	complex64int64qint8quint8qint32bfloat16qint16quint16uint16
complex128halfuint32uint64r   convert_to_tensorresourcer   )r   r   r   r   r   r   r   r   r   r   r#   r@   _attr_T	_inputs_Tr:   r9   r8   s                    r;   r   r   B  s|   K"";>+66RPUW^`d7egjmtm|m|  F  N  N  PW  P]  P]  _f  _l  _l  nu  n{  n{  }D  }I  }I  KR  K\  K\  ^e  ^k  ^k  mt  mz  mz  |C  |J  |J  LS  LZ  LZ  \c  \l  \l  nu  n|  n|  ~E  ~M  ~M  OV  O]  O]  _f  _q  _q  sz  s  s  AH  AO  AO  QX  Q_  Q_  mb  c'93<0;E5'4sG$4$45#
Q 0 01!
Q 0 01!q!["eUGTJ,-5&3Q|#)s?''	.r=   TV_ResourceApplyAdadelta_Tc	                 6   t         j                   xs t        j                         }	|	j                  }
|
j                  r#	 t	        j
                  |	d|| ||||||d|      }|S |d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)aW  Update '*var' according to the adadelta scheme.

  accum = rho() * accum + (1 - rho()) * grad.square();
  update = (update_accum + epsilon).sqrt() * (accum + epsilon()).rsqrt() * grad;
  update_accum = rho() * update_accum + (1 - rho()) * update.square();
  var -= update;

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    accum_update: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `lr`.
      Decay factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var, accum and update_accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdadeltar   Nr   FrJ   )r&   r   r'   r(   r   r   r   r   r   r   r   &resource_apply_adadelta_eager_fallbackr   r*   r+   r,   r-   )r   rD   rE   r   rF   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                   r;   resource_apply_adadeltar  U  s<   6 
			0h..0$#\\11%tS%rWdM;8g n K"";>+'88S.:rs)0t-8t	E!QX
 
*' && -
##At,,## 
3
ulBWd!$8 8 ## 
0    !B" "C)5CC)(C)-D DDzraw_ops.ResourceApplyAdadeltac
                    |d}t        j                  |d      }t        j                  ||||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| ||||||g}d|
d|f}t        j4                  dd|||	|      }d }|S )NFr   r$   s   ResourceApplyAdadeltar   r   r   )r   rD   rE   r   rF   r   r   r   r#   r@   r  r  r:   r9   r8   s                  r;   r  r    sa   K"";>+66C$7OQTW^WfWfhohwhw  zA  zG  zG  IP  IV  IV  X_  Xe  Xe  gn  gs  gs  u|  uF  uF  HO  HU  HU  W^  Wd  Wd  fm  ft  ft  v}  vD  vD  FM  FV  FV  X_  Xf  Xf  ho  hw  hw  y@  yG  yG  IP  I[  I[  ]d  ]i  ]i  kr  ky  ky  {B  {I  {I  WL  M'9&2sGTsG$4$45#

 
 (8(8
9%''g6F6FG,ulBWdC,-5&5q#)s?''	.r=   TV_ResourceApplyAdagrad_Tc                 `   t         j                   xs t        j                         }|j                  }|j                  r"	 t	        j
                  |d|| |||d|d|      }	|	S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| ||||||      \  }}}}|S # t        j                  $ r }
t        j                  |
|       Y d}
~
nd}
~
wt        j                  $ r Y nw xY w	 t        | |||||||      S # t        j                  $ r Y w xY w)	a  Update '*var' according to the adagrad scheme.

  accum += grad * grad
  var -= lr * grad * (1 / sqrt(accum))

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdagradr   rP   Nr   rP   r#   r@   FTrT   )r&   r   r'   r(   r   r   r   r   r   r   r   %resource_apply_adagrad_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   r   rP   r#   r3   r4   r8   r   r5   r6   r7   s                 r;   resource_apply_adagradr    sO   , 
			0h..0$#\\11$dCD{NLBg n K"";>+L##L.A,'88Cu$,7-9F!QX 
*+ && -
##At,,## 
2
ub$K#$D: : ## 
s0     B9 9D C''D ?D D D-,D-zraw_ops.ResourceApplyAdagradc                    |d}t        j                  |d      }|d}t        j                  |d      }t        j                  ||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}	|	\  }}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| |||g}
d|d|d|f}t        j4                  dd|
|||      }d }|S )	NFr   TrP   r$   s   ResourceApplyAdagradr   r   r   )r   rD   r   r   r   rP   r#   r@   r  r  r:   r9   r8   s                r;   r  r    sM   K"";>+L##L.A,66Dz3ZaZiZikrkxkx  {B  {H  {H  JQ  JW  JW  Y`  Ye  Ye  gn  gx  gx  zA  zG  zG  IP  IV  IV  X_  Xf  Xf  ho  hv  hv  x  xH  xH  JQ  JX  JX  Za  Zi  Zi  kr  ky  ky  {B  {M  {M  OV  O[  O[  ]d  ]k  ]k  mt  m{  m{  I~  '9*2tsG$4$45#

 
 (8(8
9%ub$',-n&4a#)s?''	.r=   TV_ResourceApplyAdagradDA_Tc
                 <   t         j                   xs t        j                         }
|
j                  }|j                  r$	 t	        j
                  |
d|	| |||||||d|      }|S |d}t        j                  |d      }t        j                   d| |||||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||	       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
      S # t        j                  $ r Y w xY w)a  Update '*var' according to the proximal adagrad scheme.

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    gradient_accumulator: A `Tensor` of type `resource`.
      Should be from a Variable().
    gradient_squared_accumulator: A `Tensor` of type `resource`.
      Should be from a Variable().
    grad: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      The gradient.
    lr: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `grad`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `grad`.
      L2 regularization. Must be a scalar.
    global_step: A `Tensor` of type `int64`.
      Training step number. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdagradDAr   Nr   Fra   )r&   r   r'   r(   r   r   r   r   r   r   r   (resource_apply_adagrad_da_eager_fallbackr   r*   r+   r,   r-   )r   rY   rZ   r   r   r[   r\   r]   r   r#   r3   r4   r8   r   r5   r6   r7   s                    r;   resource_apply_adagrad_dar    sJ   6 
			0h..0$#\\	11&c3G$dBB{$g n K"";>+'88 c7K?['+rb.9.9F!QX 
*+ && -
##At,,## 
5
#%A4
b+;TtM M ## 
0    "B$ $C+7CC+*C+/D DDzraw_ops.ResourceApplyAdagradDAc                 8   |d}t        j                  |d      }t        j                  ||||g|
t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }t/        j0                  |t        j                        }| |||||||g}d|d|f}t        j4                  dd|||
|	      }d }|S )NFr   r$   s   ResourceApplyAdagradDAr   r   r   )r   rY   rZ   r   r   r[   r\   r]   r   r#   r@   r  r  r:   r9   r8   s                   r;   r  r  ,  s   K"";>+66b"b7I3QXQ`Q`bibqbqsz  tA  tA  CJ  CP  CP  RY  R_  R_  ah  am  am  ov  o@  o@  BI  BO  BO  QX  Q^  Q^  `g  `n  `n  pw  p~  p~  @G  @P  @P  RY  R`  R`  bi  bq  bq  sz  sA  sA  CJ  CU  CU  W^  Wc  Wc  el  es  es  u|  uC  uC  QF  G'9 4RsG$4$45#//0DgFVFVW!%!7!78TV]VfVf!g&&{GMMB++-I4QSUWY[]hi,-5&6$0C"&(' '	.r=   TV_ResourceApplyAdagradV2_Tc                 f   t         j                   xs t        j                         }|j                  }	|	j                  r#	 t	        j
                  |d|| ||||d|d|      }
|
S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| |||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | ||||||||	      S # t        j                  $ r Y w xY w)	aJ  Update '*var' according to the adagrad scheme.

  accum += grad * grad
  var -= lr * grad * (1 / (sqrt(accum) + epsilon))

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdagradV2r   rP   Nr  FTrj   )r&   r   r'   r(   r   r   r   r   r   r   r   (resource_apply_adagrad_v2_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   r   r   rP   r#   r3   r4   r8   r   r5   r6   r7   s                  r;   resource_apply_adagrad_v2r  A  sV   0 
			0h..0$#\\11&c5"gt{NLBg n K"";>+L##L.A,'88 c2*1.9/;$	H!QX
 
*- && -
##At,,## 
5
ub'4[#$D: : ## 
0    !B; ;DC))DDD D0/D0zraw_ops.ResourceApplyAdagradV2c	                    |d}t        j                  |d      }|d}t        j                  |d      }t        j                  |||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }	}
|
\  }}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||g}d|	d|d|f}t        j4                  dd||||      }d }|S )	NFr   TrP   r$   s   ResourceApplyAdagradV2r   r   r   )r   rD   r   r   r   r   rP   r#   r@   r  r  r:   r9   r8   s                 r;   r  r  {  sc   K"";>+L##L.A,66GT7JCRYRaRacjcrcrt{  uB  uB  DK  DQ  DQ  SZ  S`  S`  bi  bn  bn  pw  pA  pA  CJ  CP  CP  RY  R_  R_  ah  ao  ao  qx  q  q  AH  AQ  AQ  SZ  Sa  Sa  cj  cr  cr  t{  tB  tB  DK  DV  DV  X_  Xd  Xd  fm  ft  ft  v}  vD  vD  RG  H'9!2wsG$4$45#

 
 (8(8
9%ub'40,-n&6$0C"&(' '	.r=   TV_ResourceApplyAdam_Tc                    t         j                   xs t        j                         }|j                  }|j                  r(	 t	        j
                  |d|| |||||||||	d|
d|      }|S |
d}
t        j                  |
d      }
|d}t        j                  |d      }t        j                   d| |||||||||	|
||      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|||      S # t        j                  $ r Y w xY w)a  Update '*var' according to the Adam algorithm.

  $$\text{lr}_t := \mathrm{lr} \cdot \frac{\sqrt{1 - \beta_2^t}}{1 - \beta_1^t}$$
  $$m_t := \beta_1 \cdot m_{t-1} + (1 - \beta_1) \cdot g$$
  $$v_t := \beta_2 \cdot v_{t-1} + (1 - \beta_2) \cdot g^2$$
  $$\text{var} := \begin{cases} \text{var} - (m_t \beta_1 + g \cdot (1 - \beta_1))\cdot\text{lr}_t/(\sqrt{v_t} + \epsilon), &\text{if use_nesterov}\\\\  \text{var} - m_t \cdot \text{lr}_t /(\sqrt{v_t} + \epsilon), &\text{otherwise} \end{cases}$$

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    m: A `Tensor` of type `resource`. Should be from a Variable().
    v: A `Tensor` of type `resource`. Should be from a Variable().
    beta1_power: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Must be a scalar.
    beta2_power: A `Tensor`. Must have the same type as `beta1_power`.
      Must be a scalar.
    lr: A `Tensor`. Must have the same type as `beta1_power`.
      Scaling factor. Must be a scalar.
    beta1: A `Tensor`. Must have the same type as `beta1_power`.
      Momentum factor. Must be a scalar.
    beta2: A `Tensor`. Must have the same type as `beta1_power`.
      Momentum factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `beta1_power`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `beta1_power`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, m, and v tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, uses the nesterov update.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdamr   rp   Nr   rp   r#   r@   Frt   )r&   r   r'   r(   r   r   r   r   r   r   r   "resource_apply_adam_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r   ro   r   r   r   r   r   r   rp   r#   r3   r4   r8   r   r5   r6   r7   s                       r;   resource_apply_adamr%    sw   H 
			0h..0$#\\	11!4aK
E5'4&g n K"";>+L##L.A,'88QK)45#(')4*6TC!QX 
*1 && -
##At,,## 
/
q![+r5%
Kl  ## 
0    &C DC33DDD) )D?>D?zraw_ops.ResourceApplyAdamc                 8   |
d}
t        j                  |
d      }
|d}t        j                  |d      }t        j                  |||||||	g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}}}	t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||||	g
}d|d|
d|f}t        j4                  dd||||      }d }|S )NFr   rp   r$   s   ResourceApplyAdamr   r   r   )r   r   r   r   ro   r   r   r   r   r   r   rp   r#   r@   r  r  r:   r9   r8   s                      r;   r$  r$    s   K"";>+L##L.A,66[RTV[]bdkmq7rtw  {B  {J  {J  LS  L[  L[  ]d  ]j  ]j  ls  ly  ly  {B  {H  {H  JQ  JV  JV  X_  Xi  Xi  kr  kx  kx  zA  zG  zG  IP  IW  IW  Y`  Yg  Yg  ip  iy  iy  {B  {I  {I  KR  KZ  KZ  \c  \j  \j  ls  l~  l~  @G  @L  @L  NU  N\  N\  ^e  ^l  ^l  zo  p'9@I=;RwsG$4$45#
Q 0 01!
Q 0 01!q![+r5%RVW,-n&11\#)s?''	.r=   !TV_ResourceApplyAdamWithAmsgrad_Tvhatc                 N   t         j                   xs t        j                         }|j                  }|j                  r'	 t	        j
                  |d|| |||||||||	|
d|      }|S |d}t        j                  |d      }t        j                   d| |||||||||	|
||      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|||      S # t        j                  $ r Y w xY w)a1  Update '*var' according to the Adam algorithm.

  $$\text{lr}_t := \mathrm{learning_rate} * \sqrt{1 - \beta_2^t} / (1 - \beta_1^t)$$
  $$m_t := \beta_1 * m_{t-1} + (1 - \beta_1) * g$$
  $$v_t := \beta_2 * v_{t-1} + (1 - \beta_2) * g * g$$
  $$\hat{v}_t := max{\hat{v}_{t-1}, v_t}$$
  $$\text{variable} := \text{variable} - \text{lr}_t * m_t / (\sqrt{\hat{v}_t} + \epsilon)$$

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    m: A `Tensor` of type `resource`. Should be from a Variable().
    v: A `Tensor` of type `resource`. Should be from a Variable().
    vhat: A `Tensor` of type `resource`. Should be from a Variable().
    beta1_power: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Must be a scalar.
    beta2_power: A `Tensor`. Must have the same type as `beta1_power`.
      Must be a scalar.
    lr: A `Tensor`. Must have the same type as `beta1_power`.
      Scaling factor. Must be a scalar.
    beta1: A `Tensor`. Must have the same type as `beta1_power`.
      Momentum factor. Must be a scalar.
    beta2: A `Tensor`. Must have the same type as `beta1_power`.
      Momentum factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `beta1_power`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `beta1_power`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, m, and v tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAdamWithAmsgradr   Nr   F)r   r   r   r)  r   ro   r   r   r   r   r   r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   /resource_apply_adam_with_amsgrad_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r)  r   ro   r   r   r   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                       r;    resource_apply_adam_with_amsgradr-    sZ   H 
			0h..0$#\\	11,dCAt["eUGT{$g n K"";>+'88&C14?4?B.3507d4?dL!QX 
*+ && -
##At,,## 
<
q!T;R
4[tG G ## 
s0    %B* *C1=CC10C15D D$#D$z$raw_ops.ResourceApplyAdamWithAmsgradc                 J   |d}t        j                  |d      }t        j                  ||||||	|
g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}}	}
t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||||	|
g}d|d|f}t        j4                  dd||||      }d }|S )NFr   r$   s   ResourceApplyAdamWithAmsgradr   r   r   )r   r   r   r)  r   ro   r   r   r   r   r   r   r#   r@   r  r  r:   r9   r8   s                      r;   r,  r,  8  s   K"";>+66[RTV[]bdkmq7rtw  {B  {J  {J  LS  L[  L[  ]d  ]j  ]j  ls  ly  ly  {B  {H  {H  JQ  JV  JV  X_  Xi  Xi  kr  kx  kx  zA  zG  zG  IP  IW  IW  Y`  Yg  Yg  ip  iy  iy  {B  {I  {I  KR  KZ  KZ  \c  \j  \j  ls  l~  l~  @G  @L  @L  NU  N\  N\  ^e  ^l  ^l  zo  p'9@I=;RwsG$4$45#
Q 0 01!
Q 0 01!			g&6&6	7$q!T;RwX\],-5&<a$0C"&(' '	.r=   TV_ResourceApplyAddSign_Tc	                 6   t         j                   xs t        j                         }	|	j                  }
|
j                  r#	 t	        j
                  |	d|| ||||||d|      }|S |d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)a  Update '*var' according to the AddSign update.

  m_t <- beta1 * m_{t-1} + (1 - beta1) * g
  update <- (alpha + sign_decay * sign(g) *sign(m)) * g
  variable <- variable - lr_t * update

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    m: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    alpha: A `Tensor`. Must have the same type as `lr`. Must be a scalar.
    sign_decay: A `Tensor`. Must have the same type as `lr`. Must be a scalar.
    beta: A `Tensor`. Must have the same type as `lr`. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and m tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyAddSignr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   &resource_apply_add_sign_eager_fallbackr   r*   r+   r,   r-   )r   r   r   rz   r{   r|   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                   r;   resource_apply_add_signr3  M  s;   2 
			0h..0$#\\11$dCBzdM;0g n K"";>+'88C15+5Dt,7dD!QX 
*% && -
##At,,## 
3
q"eZt  ## 
r	  zraw_ops.ResourceApplyAddSignc
                    |d}t        j                  |d      }t        j                  |||||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||||g}d|
d|f}t        j4                  dd|||	|      }d }|S )NFr   r$   s   ResourceApplyAddSignr   r   r   )r   r   r   rz   r{   r|   r   r   r#   r@   r  r  r:   r9   r8   s                  r;   r2  r2    sV   K"";>+66E:tUY7Z\_bibqbqsz  tC  tC  EL  ER  ER  T[  Ta  Ta  cj  cp  cp  ry  r~  r~  @G  @Q  @Q  SZ  S`  S`  bi  bo  bo  qx  q  q  AH  AO  AO  QX  Qa  Qa  cj  cq  cq  sz  sB  sB  DK  DR  DR  T[  Tf  Tf  ho  ht  ht  v}  vD  vD  FM  FT  FT  bW  X'9(1%2uj$sG$4$45#
Q 0 01!q"eZt<,-5&4a#)s?''	.r=   !TV_ResourceApplyCenteredRMSProp_Tc                 B   t         j                   xs t        j                         }|j                  }|j                  r%	 t	        j
                  |d|
| ||||||||d|	      }|S |	d}	t        j                  |	d      }	t        j                   d| |||||||||	|
      \  }}}}|S # t        j                  $ r }t        j                  ||
       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|      S # t        j                  $ r Y w xY w)a  Update '*var' according to the centered RMSProp algorithm.

  The centered RMSProp algorithm uses an estimate of the centered second moment
  (i.e., the variance) for normalization, as opposed to regular RMSProp, which
  uses the (uncentered) second moment. This often helps with training, but is
  slightly more expensive in terms of computation and memory.

  Note that in dense implementation of this algorithm, mg, ms, and mom will
  update even if the grad is zero, but in this sparse implementation, mg, ms,
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  mean_grad = decay * mean_grad + (1-decay) * gradient

  Delta = learning_rate * gradient / sqrt(mean_square + epsilon - mean_grad ** 2)

  mg <- rho * mg_{t-1} + (1-rho) * grad
  ms <- rho * ms_{t-1} + (1-rho) * grad * grad
  mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms - mg * mg + epsilon)
  var <- var - mom

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    mg: A `Tensor` of type `resource`. Should be from a Variable().
    ms: A `Tensor` of type `resource`. Should be from a Variable().
    mom: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `lr`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `lr`.
      Momentum Scale. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, mg, ms, and mom tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyCenteredRMSPropr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   /resource_apply_centered_rms_prop_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r   r   rF   r   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                     r;    resource_apply_centered_rms_propr9    sI   Z 
			0h..0$#\\11,dCRb#'4=g n K"";>+'88&CB232,/(07d4?d	L!QX
 
*' && -
##At,,## 
<
r2sBXw!$8 8 ## 
r   z$raw_ops.ResourceApplyCenteredRMSPropc                 >   |	d}	t        j                  |	d      }	t        j                  |||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| ||||||||g	}d|d|	f}t        j4                  dd||||
      }d }|S )NFr   r$   s   ResourceApplyCenteredRMSPropr   r   r   )r   r   r   r   r   rF   r   r   r   r   r#   r@   r  r  r:   r9   r8   s                    r;   r8  r8    s   K"";>+66C7TX7Y[^ahapapry  sB  sB  DK  DQ  DQ  SZ  S`  S`  bi  bo  bo  qx  q}  q}  F  P  P  RY  R_  R_  ah  an  an  pw  p~  p~  @G  @N  @N  PW  P`  P`  bi  bp  bp  ry  rA  rA  CJ  CQ  CQ  SZ  Se  Se  gn  gs  gs  u|  uC  uC  EL  ES  ES  aV  W'9'0$2sHgtsG$4$45#b'"2"23"b'"2"23"sG$4$45#r2sBXwE,-5&<a$0C"&(' '	.r=   TV_ResourceApplyFtrl_Tc                 x   t         j                   xs t        j                         }|j                  }|j                  r&	 t	        j
                  |d|
| |||||||d|d|	      }|S |d}t        j                  |d      }|	d}	t        j                  |	d      }	t        j                   d| |||||||||	|
      \  }}}}|S # t        j                  $ r }t        j                  ||
       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|      S # t        j                  $ r Y w xY w)a*  Update '*var' according to the Ftrl-proximal scheme.

  accum_new = accum + grad * grad
  linear += grad - (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    linear: A `Tensor` of type `resource`. Should be from a Variable().
    grad: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      The gradient.
    lr: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `grad`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `grad`.
      L2 regularization. Must be a scalar.
    lr_power: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyFtrlr   r   Nr   r   r#   r@   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   "resource_apply_ftrl_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   r   r[   r\   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                     r;   resource_apply_ftrlr@    sq   B 
			0h..0$#\\	11!4eVT2r2-.Eg n K"";>+"!",,-BD[\'88E&t "rb8)43H"&(!QX 
*1 && -
##At,,## 
/
ufdBB! 5DdL L ## 
s0    $C DC//DDD# #D98D9zraw_ops.ResourceApplyFtrlc                 ,   |d}t        j                  |d      }|	d}	t        j                  |	d      }	t        j                  |||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||g}d|d|d|	f}t        j4                  dd||||
      }d }|S )NFr   r   r$   s   ResourceApplyFtrlr   r   r   )r   rD   r   r   r   r[   r\   r   r   r   r#   r@   r  r  r:   r9   r8   s                    r;   r?  r?  =  s   K"";>+"!",,-BD[\66b"b(7SUX[b[j[jlsl{l{  ~E  ~K  ~K  MT  MZ  MZ  \c  \i  \i  kr  kw  kw  y@  yJ  yJ  LS  LY  LY  [b  [h  [h  jq  jx  jx  zA  zH  zH  JQ  JZ  JZ  \c  \j  \j  ls  l{  l{  }D  }K  }K  MT  M_  M_  ah  am  am  ov  o}  o}  F  M  M  [P  Q'9!*4RXsG$4$45#

 
 (8(8
9%!!&'*:*:;&ufdBBA,-6M&11\#)s?''	.r=   TV_ResourceApplyFtrlV2_Tc                 ~   t         j                   xs t        j                         }|j                  }|j                  r'	 t	        j
                  |d|| ||||||||d|	d|
      }|S |	d}	t        j                  |	d      }	|
d}
t        j                  |
d      }
t        j                   d| |||||||||	|
|      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
||      S # t        j                  $ r Y w xY w)a  Update '*var' according to the Ftrl-proximal scheme.

  accum_new = accum + grad * grad
  grad_with_shrinkage = grad + 2 * l2_shrinkage * var
  linear += grad_with_shrinkage +
      (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    linear: A `Tensor` of type `resource`. Should be from a Variable().
    grad: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      The gradient.
    lr: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `grad`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `grad`.
      L2 shrinkage regularization. Must be a scalar.
    l2_shrinkage: A `Tensor`. Must have the same type as `grad`.
    lr_power: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyFtrlV2r   r   Nr>  Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   %resource_apply_ftrl_v2_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   r   r[   r\   r   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                      r;   resource_apply_ftrl_v2rF  T  sw   H 
			0h..0$#\\	11#T3vtR
L(M;!68g n K"";>+"!",,-BD[\'883eF"$(0k5J$(*!QX 
*1 && -
##At,,## 
2
ufdBBh! 5DdL L ## 
0    %C D
C11D
	D
D& &D<;D<zraw_ops.ResourceApplyFtrlV2c                 2   |	d}	t        j                  |	d      }	|
d}
t        j                  |
d      }
t        j                  ||||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| ||||||||g	}d|d|	d|
f}t        j4                  dd||||      }d }|S )NFr   r   r$   s   ResourceApplyFtrlV2r   r   r   )r   rD   r   r   r   r[   r\   r   r   r   r   r#   r@   r  r  r:   r9   r8   s                     r;   rE  rE    s   K"";>+"!",,-BD[\66b"b,X`7acfipixix  {B  {J  {J  LS  LY  LY  [b  [h  [h  jq  jw  jw  y@  yE  yE  GN  GX  GX  Za  Zg  Zg  ip  iv  iv  x  xF  xF  HO  HV  HV  X_  Xh  Xh  jq  jx  jx  zA  zI  zI  KR  KY  KY  [b  [m  [m  ov  o{  o{  }D  }K  }K  MT  M[  M[  i^  _'9/8,4R\8sG$4$45#

 
 (8(8
9%!!&'*:*:;&ufdBBhO,-6M&3Q|#)s?''	.r=   !TV_ResourceApplyGradientDescent_Tc           
         t         j                   xs t        j                         }|j                  }|j                  r	 t	        j
                  |d|| ||d|      }|S |d}t        j                  |d      }t        j                   d| ||||      \  }	}	}
}|
S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||      S # t        j                  $ r Y w xY w)a  Update '*var' by subtracting 'alpha' * 'delta' from it.

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    alpha: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    delta: A `Tensor`. Must have the same type as `alpha`. The change.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, the subtraction will be protected by a lock;
      otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyGradientDescentr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   .resource_apply_gradient_descent_eager_fallbackr   r*   r+   r,   r-   )r   rz   r   r   r#   r3   r4   r8   r   r5   r6   r7   s               r;   resource_apply_gradient_descentrM    s!     
			0h..0$#\\11,dC{$g n K"";>+'88&CuE4?dL!QX 
*! && -
##At,,## 
;
ue4TK K## 
s0    B C!-CC! C!%C6 6DDz$raw_ops.ResourceApplyGradientDescentc                 N   |d}t        j                  |d      }t        j                  ||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}t/        j0                  | t        j2                        } | ||g}d|d|f}	t        j4                  dd||	||      }
d }
|
S )NFr   r$   s   ResourceApplyGradientDescentr   r   r   )r   rz   r   r   r#   r@   r  r  r:   r9   r8   s              r;   rL  rL    s   K"";>+66u~sW__^e^m^movo|o|  F  L  L  NU  N[  N[  ]d  ]i  ]i  kr  k|  k|  ~E  ~K  ~K  MT  MZ  MZ  \c  \j  \j  ls  lz  lz  |C  |L  |L  NU  N\  N\  ^e  ^m  ^m  ov  o}  o}  F  Q  Q  SZ  S_  S_  ah  ao  ao  qx  q  q  MB  C'9.5%sG$4$45#ue$,-5&<a$0C"&(' '	.r=   TV_ResourceApplyKerasMomentum_Tc                 f   t         j                   xs t        j                         }|j                  }	|	j                  r#	 t	        j
                  |d|| ||||d|d|      }
|
S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| |||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | ||||||||	      S # t        j                  $ r Y w xY w)a  Update '*var' according to the momentum scheme.

  Set use_nesterov = True if you want to use Nesterov momentum.

  accum = accum * momentum - lr * grad
  var += accum

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    momentum: A `Tensor`. Must have the same type as `lr`.
      Momentum. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, the tensor passed to compute grad will be
      var + momentum * accum, so in the end, the var you get is actually
      var + momentum * accum.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyKerasMomentumr   rp   Nr#  Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   ,resource_apply_keras_momentum_eager_fallbackr   r*   r+   r,   r-   r   rD   r   r   r   r   rp   r#   r3   r4   r8   r   r5   r6   r7   s                  r;   resource_apply_keras_momentumrT    sV   : 
			0h..0$#\\11*D#ub$-nlLg n K"";>+L##L.A,'88$#Ur/72=3?d	L!QX
 
*- && -
##At,,## 
9
ub$k#$D: : ## 
r  z"raw_ops.ResourceApplyKerasMomentumc	                    |d}t        j                  |d      }|d}t        j                  |d      }t        j                  |||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }	}
|
\  }}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||g}d|	d|d|f}t        j4                  dd||||      }d }|S )NFr   rp   r$   s   ResourceApplyKerasMomentumr   r   r   r   rD   r   r   r   r   rp   r#   r@   r  r  r:   r9   r8   s                 r;   rR  rR  1  sc   K"";>+L##L.A,66D(7KSSZSbSbdkdsdsu|  vC  vC  EL  ER  ER  T[  Ta  Ta  cj  co  co  qx  qB  qB  DK  DQ  DQ  SZ  S`  S`  bi  bp  bp  ry  r@  r@  BI  BR  BR  T[  Tb  Tb  dk  ds  ds  u|  uC  uC  EL  EW  EW  Y`  Ye  Ye  gn  gu  gu  w~  wE  wE  SH  I'9"2tXsG$4$45#

 
 (8(8
9%ub$1,-n&:A$0C"&(' '	.r=   TV_ResourceApplyMomentum_Tc                 f   t         j                   xs t        j                         }|j                  }	|	j                  r#	 t	        j
                  |d|| ||||d|d|      }
|
S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| |||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | ||||||||	      S # t        j                  $ r Y w xY w)a  Update '*var' according to the momentum scheme.

  Set use_nesterov = True if you want to use Nesterov momentum.

  accum = accum * momentum + grad
  var -= lr * accum

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    momentum: A `Tensor`. Must have the same type as `lr`.
      Momentum. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, the tensor passed to compute grad will be
      var - lr * momentum * accum, so in the end, the var you get is actually
      var - lr * momentum * accum.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyMomentumr   rp   Nr#  Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   &resource_apply_momentum_eager_fallbackr   r*   r+   r,   r-   rS  s                  r;   resource_apply_momentumr[  H  sU   : 
			0h..0$#\\11%tS%T8{NLBg n K"";>+L##L.A,'88S"4*2.:G!QX 
*+ && -
##At,,## 
3
ub$k#$D: : ## 
r  zraw_ops.ResourceApplyMomentumc	                    |d}t        j                  |d      }|d}t        j                  |d      }t        j                  |||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }	}
|
\  }}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||g}d|	d|d|f}t        j4                  dd||||      }d }|S )NFr   rp   r$   s   ResourceApplyMomentumr   r   r   rV  s                 r;   rZ  rZ    s`   K"";>+L##L.A,66D(7KSSZSbSbdkdsdsu|  vC  vC  EL  ER  ER  T[  Ta  Ta  cj  co  co  qx  qB  qB  DK  DQ  DQ  SZ  S`  S`  bi  bp  bp  ry  r@  r@  BI  BR  BR  T[  Tb  Tb  dk  ds  ds  u|  uC  uC  EL  EW  EW  Y`  Ye  Ye  gn  gu  gu  w~  wE  wE  SH  I'9"2tXsG$4$45#

 
 (8(8
9%ub$1,-n&5q#)s?''	.r=   TV_ResourceApplyPowerSign_Tc	                 6   t         j                   xs t        j                         }	|	j                  }
|
j                  r#	 t	        j
                  |	d|| ||||||d|      }|S |d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)a  Update '*var' according to the AddSign update.

  m_t <- beta1 * m_{t-1} + (1 - beta1) * g
  update <- exp(logbase * sign_decay * sign(g) * sign(m_t)) * g
  variable <- variable - lr_t * update

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    m: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    logbase: A `Tensor`. Must have the same type as `lr`. Must be a scalar.
    sign_decay: A `Tensor`. Must have the same type as `lr`. Must be a scalar.
    beta: A `Tensor`. Must have the same type as `lr`. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and m tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyPowerSignr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   (resource_apply_power_sign_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r   r{   r|   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                   r;   resource_apply_power_signra    s;   2 
			0h..0$#\\11&c1b':dM;0g n K"";>+'88 cQ2w-7d.9F!QX 
*% && -
##At,,## 
5
q"gz4!$8 8 ## 
r	  zraw_ops.ResourceApplyPowerSignc
                    |d}t        j                  |d      }t        j                  |||||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||||g}d|
d|f}t        j4                  dd|||	|      }d }|S )NFr   r$   s   ResourceApplyPowerSignr   r   r   )r   r   r   r   r{   r|   r   r   r#   r@   r  r  r:   r9   r8   s                  r;   r`  r`    sZ   K"";>+66GZQUW[7\^adkdsdsu|  vE  vE  GN  GT  GT  V]  Vc  Vc  el  er  er  t{  t@  t@  BI  BS  BS  U\  Ub  Ub  dk  dq  dq  sz  sA  sA  CJ  CQ  CQ  SZ  Sc  Sc  el  es  es  u|  uD  uD  FM  FT  FT  V]  Vh  Vh  jq  jv  jv  x  xF  xF  HO  HV  HV  dY  Z'9*3'2w
D$sG$4$45#
Q 0 01!q"gz4>,-5&6$0C"&(' '	.r=   !TV_ResourceApplyProximalAdagrad_Tc                 0   t         j                   xs t        j                         }|j                  }	|	j                  r"	 t	        j
                  |d|| |||||d|      }
|
S |d}t        j                  |d      }t        j                   d| |||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | ||||||||	      S # t        j                  $ r Y w xY w)a  Update '*var' and '*accum' according to FOBOS with Adagrad learning rate.

  accum += grad * grad
  prox_v = var - lr * grad * (1 / sqrt(accum))
  var = sign(prox_v)/(1+lr*l2) * max{|prox_v|-lr*l1,0}

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `lr`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `lr`.
      L2 regularization. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyProximalAdagradr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   .resource_apply_proximal_adagrad_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r[   r\   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                  r;   resource_apply_proximal_adagradrg    s5   2 
			0h..0$#\\11,dCBm[*g n K"";>+'88&Cu+-D4?dL!QX 
*% && -
##At,,## 
;
ub"b$Kd  ## 
0     B   C'3CC'&C'+C? ?DDz$raw_ops.ResourceApplyProximalAdagradc	                    |d}t        j                  |d      }t        j                  ||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }	}
|
\  }}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| |||||g}d|	d|f}t        j4                  dd||||      }d }|S )NFr   r$   s   ResourceApplyProximalAdagradr   r   r   )r   rD   r   r[   r\   r   r   r#   r@   r  r  r:   r9   r8   s                 r;   rf  rf  	  sE   K"";>+66BD7I3QXQ`Q`bibqbqsz  tA  tA  CJ  CP  CP  RY  R_  R_  ah  am  am  ov  o@  o@  BI  BO  BO  QX  Q^  Q^  `g  `n  `n  pw  p~  p~  @G  @P  @P  RY  R`  R`  bi  bq  bq  sz  sA  sA  CJ  CU  CU  W^  Wc  Wc  el  es  es  u|  uC  uC  QF  G'9 2r2tsG$4$45#

 
 (8(8
9%ub"b$/,-5&<a$0C"&(' '	.r=   )TV_ResourceApplyProximalGradientDescent_Tc                 *   t         j                   xs t        j                         }|j                  }|j                  r!	 t	        j
                  |d|| ||||d|
      }	|	S |d}t        j                  |d      }t        j                   d| ||||||      \  }}}}|S # t        j                  $ r }
t        j                  |
|       Y d}
~
nd}
~
wt        j                  $ r Y nw xY w	 t        | |||||||      S # t        j                  $ r Y w xY w)a1  Update '*var' as FOBOS algorithm with fixed learning rate.

  prox_v = var - alpha * delta
  var = sign(prox_v)/(1+alpha*l2) * max{|prox_v|-alpha*l1,0}

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    alpha: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `alpha`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `alpha`.
      L2 regularization. Must be a scalar.
    delta: A `Tensor`. Must have the same type as `alpha`. The change.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, the subtraction will be protected by a lock;
      otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  $ResourceApplyProximalGradientDescentr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   7resource_apply_proximal_gradient_descent_eager_fallbackr   r*   r+   r,   r-   )r   rz   r[   r\   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                 r;   (resource_apply_proximal_gradient_descentrn  0	  s/   . 
			0h..0$#\\114dC
E=+/g n K"";>+'88.Cu35U<G59	;!QX
 
*' && -
##At,,## 
D
ub"e4  ## 
s0    B C%1CC%$C%)C< <DDz,raw_ops.ResourceApplyProximalGradientDescentc                 Z   |d}t        j                  |d      }t        j                  ||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}	|	\  }}}}t/        j0                  | t        j2                        } | ||||g}
d|d|f}t        j4                  dd|
|||      }d }|S )NFr   r$   s$   ResourceApplyProximalGradientDescentr   r   r   )r   rz   r[   r\   r   r   r#   r@   r  r  r:   r9   r8   s                r;   rm  rm  f	  s+   K"";>+66r2u7MsU\UdUdfmfufuw~  xE  xE  GN  GT  GT  V]  Vc  Vc  el  eq  eq  sz  sD  sD  FM  FS  FS  U\  Ub  Ub  dk  dr  dr  t{  tB  tB  DK  DT  DT  V]  Vd  Vd  fm  fu  fu  w~  wE  wE  GN  GY  GY  [b  [g  [g  ip  iw  iw  y@  yG  yG  UJ  K'9$5"b%sG$4$45#ub"e,,-5&Da$0C"&(' '	.r=   TV_ResourceApplyRMSProp_Tc
                 <   t         j                   xs t        j                         }
|
j                  }|j                  r$	 t	        j
                  |
d|	| |||||||d|      }|S |d}t        j                  |d      }t        j                   d| |||||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||	       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
      S # t        j                  $ r Y w xY w)at  Update '*var' according to the RMSProp algorithm.

  Note that in dense implementation of this algorithm, ms and mom will
  update even if the grad is zero, but in this sparse implementation, ms
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  Delta = learning_rate * gradient / sqrt(mean_square + epsilon)

  ms <- rho * ms_{t-1} + (1-rho) * grad * grad
  mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)
  var <- var - mom

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    ms: A `Tensor` of type `resource`. Should be from a Variable().
    mom: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `lr`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `lr`.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, ms, and mom tensors is protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceApplyRMSPropr   Nr   Fr   )r&   r   r'   r(   r   r   r   r   r   r   r   &resource_apply_rms_prop_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r   rF   r   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                    r;   resource_apply_rms_proprt  x	  sB   F 
			0h..0$#\\11$dCS"c8}k3g n K"";>+'88CBCBC)17,7dD!QX 
*% && -
##At,,## 
3
r3C7D!$8 8 ## 
r  zraw_ops.ResourceApplyRMSPropc                    |d}t        j                  |d      }t        j                  |||||g|
t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||g}d|d|f}t        j4                  dd|||
|	      }d }|S )NFr   r$   s   ResourceApplyRMSPropr   r   r   )r   r   r   r   rF   r   r   r   r   r#   r@   r  r  r:   r9   r8   s                   r;   rs  rs  	  sp   K"";>+66C7TX7Y[^ahapapry  sB  sB  DK  DQ  DQ  SZ  S`  S`  bi  bo  bo  qx  q}  q}  F  P  P  RY  R_  R_  ah  an  an  pw  p~  p~  @G  @N  @N  PW  P`  P`  bi  bp  bp  ry  rA  rA  CJ  CQ  CQ  SZ  Se  Se  gn  gs  gs  u|  uC  uC  EL  ES  ES  aV  W'9'0$2sHgtsG$4$45#b'"2"23"sG$4$45#r3C7DA,-5&4a#)s?''	.r=    TV_ResourceSparseApplyAdadelta_T'TV_ResourceSparseApplyAdadelta_Tindicesindicesc
                 <   t         j                   xs t        j                         }
|
j                  }|j                  r$	 t	        j
                  |
d|	| |||||||d|      }|S |d}t        j                  |d      }t        j                   d| |||||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||	       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
      S # t        j                  $ r Y w xY w)a  var: Should be from a Variable().

  Args:
    var: A `Tensor` of type `resource`.
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    accum_update: A `Tensor` of type `resource`.
      : Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Learning rate. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `lr`.
      Decay factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyAdadeltar   Nr   F
r   rD   rE   r   rF   r   r   rx  r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   -resource_sparse_apply_adadelta_eager_fallbackr   r*   r+   r,   r-   )r   rD   rE   r   rF   r   r   rx  r   r#   r3   r4   r8   r   r5   r6   r7   s                    r;   resource_sparse_apply_adadeltar}  	  sD   2 
			0h..0$#\\11+T3|
C$Eg n K"";>+'88%3e4@R+.d/63>TK!QX 
*) && -
##At,,## 
:
ulBWdG!$8 8 ## 
r  z#raw_ops.ResourceSparseApplyAdadeltac                 j   |d}t        j                  |d      }t        j                  ||||g|
t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}t        j                  |g|
t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||g}d|d|d|f}t        j4                  dd|||
|	      }d }|S )NFr   r$   Tindicess   ResourceSparseApplyAdadeltar   r   r   )r   rD   rE   r   rF   r   r   rx  r   r#   r@   r  r  _attr_Tindicesr:   r9   r8   s                    r;   r|  r|  
  s   K"";>+66C$7OQTW^WfWfhohwhw  zA  zG  zG  IP  IV  IV  X_  Xe  Xe  gn  gs  gs  u|  uF  uF  HO  HU  HU  W^  Wd  Wd  fm  ft  ft  v}  vD  vD  FM  FV  FV  X_  Xf  Xf  ho  hw  hw  y@  yG  yG  IP  I[  I[  ]d  ]i  ]i  kr  ky  ky  {B  {I  {I  WL  M'9&2sGT'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%''g6F6FG,ulBWdGL,*nm&;Q$0C"&(' '	.r=   TV_ResourceSparseApplyAdagrad_T&TV_ResourceSparseApplyAdagrad_Tindicesc                 f   t         j                   xs t        j                         }|j                  }	|	j                  r#	 t	        j
                  |d|| ||||d|d|      }
|
S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| |||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | ||||||||	      S # t        j                  $ r Y w xY w)	a  Update relevant entries in '*var' and '*accum' according to the adagrad scheme.

  That is for rows we have grad for, we update var and accum as follows:
  accum += grad * grad
  var -= lr * grad * (1 / sqrt(accum))

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Learning rate. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyAdagradr   rP   Nr  FTr   rD   r   r   rx  r   rP   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   ,resource_sparse_apply_adagrad_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   rx  r   rP   r#   r3   r4   r8   r   r5   r6   r7   s                  r;   resource_sparse_apply_adagradr  
  sV   2 
			0h..0$#\\11*D#ub$^\Kg n K"";>+L##L.A,'88$#Ur.52=3?d	L!QX
 
*- && -
##At,,## 
9
ub$[#$D: : ## 
r  z"raw_ops.ResourceSparseApplyAdagradc	                 L   |d}t        j                  |d      }|d}t        j                  |d      }t        j                  ||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }	}
|
\  }}t        j                  |g|t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||g}d|	d|d|d|f}t        j4                  dd||||	      }d }|S )
NFr   TrP   r$   r  s   ResourceSparseApplyAdagradr   r   r   )r   rD   r   r   rx  r   rP   r#   r@   r  r  r  r:   r9   r8   s                  r;   r  r  X
  s   K"";>+L##L.A,66Dz3ZaZiZikrkxkx  {B  {H  {H  JQ  JW  JW  Y`  Ye  Ye  gn  gx  gx  zA  zG  zG  IP  IV  IV  X_  Xf  Xf  ho  hv  hv  x  xH  xH  JQ  JX  JX  Za  Zi  Zi  kr  ky  ky  {B  {M  {M  OV  O[  O[  ]d  ]k  ]k  mt  m{  m{  I~  '9*2t'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%ub$0,*nm~|-&:A$0C"&(' '	.r=   !TV_ResourceSparseApplyAdagradDA_T(TV_ResourceSparseApplyAdagradDA_Tindicesc                 B   t         j                   xs t        j                         }|j                  }|j                  r%	 t	        j
                  |d|
| ||||||||d|	      }|S |	d}	t        j                  |	d      }	t        j                   d| |||||||||	|
      \  }}}}|S # t        j                  $ r }t        j                  ||
       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|      S # t        j                  $ r Y w xY w)a  Update entries in '*var' and '*accum' according to the proximal adagrad scheme.

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    gradient_accumulator: A `Tensor` of type `resource`.
      Should be from a Variable().
    gradient_squared_accumulator: A `Tensor` of type `resource`.
      Should be from a Variable().
    grad: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    lr: A `Tensor`. Must have the same type as `grad`.
      Learning rate. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `grad`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `grad`.
      L2 regularization. Must be a scalar.
    global_step: A `Tensor` of type `int64`.
      Training step number. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyAdagradDAr   Nr   Fr   rY   rZ   r   rx  r   r[   r\   r]   r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   /resource_sparse_apply_adagrad_da_eager_fallbackr   r*   r+   r,   r-   )r   rY   rZ   r   rx  r   r[   r\   r]   r   r#   r3   r4   r8   r   r5   r6   r7   s                     r;    resource_sparse_apply_adagrad_dar  q
  sO   : 
			0h..0$#\\	11,dC9M$dGRR{$g n K"";>+'88&C=QEa-17r+-"+4?dL!QX 
*- && -
##At,,## 
<
#%A4
2r2{  ## 
r   z$raw_ops.ResourceSparseApplyAdagradDAc                    |	d}	t        j                  |	d      }	t        j                  ||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}t        j                  |g|t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }t/        j0                  |t        j                        }| ||||||||g	}d|d|d|	f}t        j4                  dd||||
      }d }|S )NFr   r$   r  s   ResourceSparseApplyAdagradDAr   r   r   )r   rY   rZ   r   rx  r   r[   r\   r]   r   r#   r@   r  r  r  r:   r9   r8   s                     r;   r  r  
  s   K"";>+66b"b7I3QXQ`Q`bibqbqsz  tA  tA  CJ  CP  CP  RY  R_  R_  ah  am  am  ov  o@  o@  BI  BO  BO  QX  Q^  Q^  `g  `n  `n  pw  p~  p~  @G  @P  @P  RY  R`  R`  bi  bq  bq  sz  sA  sA  CJ  CU  CU  W^  Wc  Wc  el  es  es  u|  uC  uC  QF  G'9 4R'>>y#PWP]P]_f_l_lOop.*7sG$4$45#//0DgFVFVW!%!7!78TV]VfVf!g&&{GMMB++-I4QXZ\^`bdfqr,*nm&<a$0C"&(' '	.r=   !TV_ResourceSparseApplyAdagradV2_T(TV_ResourceSparseApplyAdagradV2_Tindicesc	                 l   t         j                   xs t        j                         }	|	j                  }
|
j                  r$	 t	        j
                  |	d|| |||||d|d|      }|S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)	a;  Update relevant entries in '*var' and '*accum' according to the adagrad scheme.

  That is for rows we have grad for, we update var and accum as follows:
  accum += grad * grad
  var -= lr * grad * (1 / sqrt(accum))

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Learning rate. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyAdagradV2r   rP   Nr  FT	r   rD   r   r   r   rx  r   rP   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   /resource_sparse_apply_adagrad_v2_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   r   rx  r   rP   r#   r3   r4   r8   r   r5   r6   r7   s                   r;    resource_sparse_apply_adagrad_v2r  
  s]   6 
			0h..0$#\\	11,dCGg}k>g n K"";>+L##L.A,'88&Cu07d074?5AN!QX 
*/ && -
##At,,## 
<
ub'4k#$D: : ## 
0    "B= =DC++DDD D32D3z$raw_ops.ResourceSparseApplyAdagradV2c
                 R   |d}t        j                  |d      }|d}t        j                  |d      }t        j                  |||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}t        j                  |g|	t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| |||||g}d|
d|d|d|f}t        j4                  dd|||	|	      }d }|S )
NFr   TrP   r$   r  s   ResourceSparseApplyAdagradV2r   r   r   )r   rD   r   r   r   rx  r   rP   r#   r@   r  r  r  r:   r9   r8   s                   r;   r  r    s   K"";>+L##L.A,66GT7JCRYRaRacjcrcrt{  uB  uB  DK  DQ  DQ  SZ  S`  S`  bi  bn  bn  pw  pA  pA  CJ  CP  CP  RY  R_  R_  ah  ao  ao  qx  q  q  AH  AQ  AQ  SZ  Sa  Sa  cj  cr  cr  t{  tB  tB  DK  DV  DV  X_  Xd  Xd  fm  ft  ft  v}  vD  vD  RG  H'9!2w'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%ub'49,*nm~|-&<a$0C"&(' '	.r=   'TV_ResourceSparseApplyCenteredRMSProp_T.TV_ResourceSparseApplyCenteredRMSProp_Tindicesc                 H   t         j                   xs t        j                         }|j                  }|j                  r&	 t	        j
                  |d|| |||||||||	d|
      }|S |
d}
t        j                  |
d      }
t        j                   d| |||||||||	|
|      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
||      S # t        j                  $ r Y w xY w)a  Update '*var' according to the centered RMSProp algorithm.

  The centered RMSProp algorithm uses an estimate of the centered second moment
  (i.e., the variance) for normalization, as opposed to regular RMSProp, which
  uses the (uncentered) second moment. This often helps with training, but is
  slightly more expensive in terms of computation and memory.

  Note that in dense implementation of this algorithm, mg, ms, and mom will
  update even if the grad is zero, but in this sparse implementation, mg, ms,
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  mean_grad = decay * mean_grad + (1-decay) * gradient
  Delta = learning_rate * gradient / sqrt(mean_square + epsilon - mean_grad ** 2)

  ms <- rho * ms_{t-1} + (1-rho) * grad * grad
  mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)
  var <- var - mom

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    mg: A `Tensor` of type `resource`. Should be from a Variable().
    ms: A `Tensor` of type `resource`. Should be from a Variable().
    mom: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `lr`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `lr`.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var, ms and mom.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, mg, ms, and mom tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  "ResourceSparseApplyCenteredRMSPropr   Nr   Fr   r   r   r   r   rF   r   r   r   rx  r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   6resource_sparse_apply_centered_rms_prop_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r   r   rF   r   r   r   rx  r   r#   r3   r4   r8   r   r5   r6   r7   s                      r;   'resource_sparse_apply_centered_rms_propr  !  sR   X 
			0h..0$#\\112D#r2s
C7D'=+Og n K"";>+'88,#"137?6=D6=:E379!QX 
*- && -
##At,,## 
C
r2sBXwg!$8 8 ## 
s0    $B( (C/;CC/.C/3D D! D!z*raw_ops.ResourceSparseApplyCenteredRMSPropc                    |
d}
t        j                  |
d      }
t        j                  |||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}t        j                  |	g|t        j                  t        j                  g      \  }\  }	t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||||	g
}d|d|d|
f}t        j4                  dd||||      }d }|S )NFr   r$   r  s"   ResourceSparseApplyCenteredRMSPropr   r   r   )r   r   r   r   r   rF   r   r   r   rx  r   r#   r@   r  r  r  r:   r9   r8   s                      r;   r  r  o  s   K"";>+66C7TX7Y[^ahapapry  sB  sB  DK  DQ  DQ  SZ  S`  S`  bi  bo  bo  qx  q}  q}  F  P  P  RY  R_  R_  ah  an  an  pw  p~  p~  @G  @N  @N  PW  P`  P`  bi  bp  bp  ry  rA  rA  CJ  CQ  CQ  SZ  Se  Se  gn  gs  gs  u|  uC  uC  EL  ES  ES  aV  W'9'0$2sHgt'>>y#PWP]P]_f_l_lOop.*7sG$4$45#b'"2"23"b'"2"23"sG$4$45#r2sBXwgN,*nm&BA$0C"&(' '	.r=   TV_ResourceSparseApplyFtrl_T#TV_ResourceSparseApplyFtrl_Tindicesc                 ~   t         j                   xs t        j                         }|j                  }|j                  r'	 t	        j
                  |d|| ||||||||d|	d|
      }|S |	d}	t        j                  |	d      }	|
d}
t        j                  |
d      }
t        j                   d| |||||||||	|
|      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
||      S # t        j                  $ r Y w xY w)a#  Update relevant entries in '*var' according to the Ftrl-proximal scheme.

  That is for rows we have grad for, we update var, accum and linear as follows:
  accum_new = accum + grad * grad
  linear += grad - (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    linear: A `Tensor` of type `resource`. Should be from a Variable().
    grad: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    lr: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `grad`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `grad`.
      L2 regularization. Must be a scalar.
    lr_power: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyFtrlr   r   Nr>  Fr   rD   r   r   rx  r   r[   r\   r   r   r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   )resource_sparse_apply_ftrl_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   rx  r   r[   r\   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                      r;   resource_sparse_apply_ftrlr    sx   H 
			0h..0$#\\	11'sE64RX}k!68g n K"";>+"!",,-BD[\'88!s%(,g"&(8/:9N(,.!QX 
*3 && -
##At,,## 
6
ufdGRR! 5DdL L ## 
rG  zraw_ops.ResourceSparseApplyFtrlc                    |	d}	t        j                  |	d      }	|
d}
t        j                  |
d      }
t        j                  |||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}t        j                  |g|t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| ||||||||g	}d|d|d|	d|
f}t        j4                  dd||||      }d }|S )	NFr   r   r$   r  s   ResourceSparseApplyFtrlr   r   r   )r   rD   r   r   rx  r   r[   r\   r   r   r   r#   r@   r  r  r  r:   r9   r8   s                      r;   r  r    s   K"";>+"!",,-BD[\66b"b(7SUX[b[j[jlsl{l{  ~E  ~K  ~K  MT  MZ  MZ  \c  \i  \i  kr  kw  kw  y@  yJ  yJ  LS  LY  LY  [b  [h  [h  jq  jx  jx  zA  zH  zH  JQ  JZ  JZ  \c  \j  \j  ls  l{  l{  }D  }K  }K  MT  M_  M_  ah  am  am  ov  o}  o}  F  M  M  [P  Q'9!*4RX'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%!!&'*:*:;&ufdGRRJ,*nm&(=?&7$0C"&(' '	.r=   TV_ResourceSparseApplyFtrlV2_T%TV_ResourceSparseApplyFtrlV2_Tindicesc                    t         j                   xs t        j                         }|j                  }|j                  r(	 t	        j
                  |d|| |||||||||	d|
d|      }|S |
d}
t        j                  |
d      }
|d}t        j                  |d      }t        j                   d| |||||||||	|
||      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|||      S # t        j                  $ r Y w xY w)a  Update relevant entries in '*var' according to the Ftrl-proximal scheme.

  That is for rows we have grad for, we update var, accum and linear as follows:
  grad_with_shrinkage = grad + 2 * l2_shrinkage * var
  accum_new = accum + grad_with_shrinkage * grad_with_shrinkage
  linear += grad_with_shrinkage +
      (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    linear: A `Tensor` of type `resource`. Should be from a Variable().
    grad: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    lr: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `grad`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `grad`.
      L2 shrinkage regularization. Must be a scalar.
    l2_shrinkage: A `Tensor`. Must have the same type as `grad`.
    lr_power: A `Tensor`. Must have the same type as `grad`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyFtrlV2r   r   Nr>  Fr   rD   r   r   rx  r   r[   r\   r   r   r   r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   ,resource_sparse_apply_ftrl_v2_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r   rx  r   r[   r\   r   r   r   r   r#   r3   r4   r8   r   r5   r6   r7   s                       r;   resource_sparse_apply_ftrl_v2r    s   N 
			0h..0$#\\	11)4eVTR\8],.CEg n K"";>+"!",,-BD[\'88#E&*.B2(*.61<;P*.0!QX 
*5 && -
##At,,## 
9
ufdGRR
 5DdL L ## 
r&  z!raw_ops.ResourceSparseApplyFtrlV2c                    |
d}
t        j                  |
d      }
|d}t        j                  |d      }t        j                  ||||||	g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}}	t        j                  |g|t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| |||||||||	g
}d|d|d|
d|f}t        j4                  dd||||      }d }|S )	NFr   r   r$   r  s   ResourceSparseApplyFtrlV2r   r   r   )r   rD   r   r   rx  r   r[   r\   r   r   r   r   r#   r@   r  r  r  r:   r9   r8   s                       r;   r  r  9  s   K"";>+"!",,-BD[\66b"b,X`7acfipixix  {B  {J  {J  LS  LY  LY  [b  [h  [h  jq  jw  jw  y@  yE  yE  GN  GX  GX  Za  Zg  Zg  ip  iv  iv  x  xF  xF  HO  HV  HV  X_  Xh  Xh  jq  jx  jx  zA  zI  zI  KR  KY  KY  [b  [m  [m  ov  o{  o{  }D  }K  }K  MT  M[  M[  i^  _'9/8,4R\8'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%!!&'*:*:;&ufdGRRxX,*nm&(=?&91$0C"&(' '	.r=   %TV_ResourceSparseApplyKerasMomentum_T,TV_ResourceSparseApplyKerasMomentum_Tindicesc	                 l   t         j                   xs t        j                         }	|	j                  }
|
j                  r$	 t	        j
                  |	d|| |||||d|d|      }|S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)a  Update relevant entries in '*var' and '*accum' according to the momentum scheme.

  Set use_nesterov = True if you want to use Nesterov momentum.

  That is for rows we have grad for, we update var and accum as follows:

  accum = accum * momentum - lr * grad
  var += accum

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Learning rate. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    momentum: A `Tensor`. Must have the same type as `lr`.
      Momentum. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, the tensor passed to compute grad will be
      var + momentum * accum, so in the end, the var you get is actually
      var + momentum * accum.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
   ResourceSparseApplyKerasMomentumr   rp   Nr#  F	r   rD   r   r   rx  r   r   rp   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   3resource_sparse_apply_keras_momentum_eager_fallbackr   r*   r+   r,   r-   r   rD   r   r   rx  r   r   rp   r#   r3   r4   r8   r   r5   r6   r7   s                   r;   $resource_sparse_apply_keras_momentumr  S  s^   B 
			0h..0$#\\	110$UB=+~g n K"";>+L##L.A,'88*5R15w5=8C9E157!QX 
*1 && -
##At,,## 
@
ub${#$D: : ## 
r  z(raw_ops.ResourceSparseApplyKerasMomentumc
                 R   |d}t        j                  |d      }|d}t        j                  |d      }t        j                  |||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}t        j                  |g|	t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| |||||g}d|
d|d|d|f}t        j4                  dd|||	|      }d }|S )	NFr   rp   r$   r  s    ResourceSparseApplyKerasMomentumr   r   r   r   rD   r   r   rx  r   r   rp   r#   r@   r  r  r  r:   r9   r8   s                   r;   r  r    s   K"";>+L##L.A,66D(7KSSZSbSbdkdsdsu|  vC  vC  EL  ER  ER  T[  Ta  Ta  cj  co  co  qx  qB  qB  DK  DQ  DQ  SZ  S`  S`  bi  bp  bp  ry  r@  r@  BI  BR  BR  T[  Tb  Tb  dk  ds  ds  u|  uC  uC  EL  EW  EW  Y`  Ye  Ye  gn  gu  gu  w~  wE  wE  SH  I'9"2tX'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%ub$:,*nm~|-&@!$0C"&(' '	.r=    TV_ResourceSparseApplyMomentum_T'TV_ResourceSparseApplyMomentum_Tindicesc	                 l   t         j                   xs t        j                         }	|	j                  }
|
j                  r$	 t	        j
                  |	d|| |||||d|d|      }|S |d}t        j                  |d      }|d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)a  Update relevant entries in '*var' and '*accum' according to the momentum scheme.

  Set use_nesterov = True if you want to use Nesterov momentum.

  That is for rows we have grad for, we update var and accum as follows:

  accum = accum * momentum + grad
  var -= lr * accum

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Learning rate. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    momentum: A `Tensor`. Must have the same type as `lr`.
      Momentum. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, the tensor passed to compute grad will be
      var - lr * momentum * accum, so in the end, the var you get is actually
      var - lr * momentum * accum.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyMomentumr   rp   Nr#  Fr  )r&   r   r'   r(   r   r   r   r   r   r   r   -resource_sparse_apply_momentum_eager_fallbackr   r*   r+   r,   r-   r  s                   r;   resource_sparse_apply_momentumr    s]   B 
			0h..0$#\\	11+T3r4=+~g n K"";>+L##L.A,'88%3e/63>4@t	M!QX
 
*- && -
##At,,## 
:
ub${#$D: : ## 
r  z#raw_ops.ResourceSparseApplyMomentumc
                 R   |d}t        j                  |d      }|d}t        j                  |d      }t        j                  |||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}t        j                  |g|	t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| |||||g}d|
d|d|d|f}t        j4                  dd|||	|      }d }|S )	NFr   rp   r$   r  s   ResourceSparseApplyMomentumr   r   r   r  s                   r;   r  r    s   K"";>+L##L.A,66D(7KSSZSbSbdkdsdsu|  vC  vC  EL  ER  ER  T[  Ta  Ta  cj  co  co  qx  qB  qB  DK  DQ  DQ  SZ  S`  S`  bi  bp  bp  ry  r@  r@  BI  BR  BR  T[  Tb  Tb  dk  ds  ds  u|  uC  uC  EL  EW  EW  Y`  Ye  Ye  gn  gu  gu  w~  wE  wE  SH  I'9"2tX'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%ub$:,*nm~|-&;Q$0C"&(' '	.r=   'TV_ResourceSparseApplyProximalAdagrad_T.TV_ResourceSparseApplyProximalAdagrad_Tindicesc	                 6   t         j                   xs t        j                         }	|	j                  }
|
j                  r#	 t	        j
                  |	d|| ||||||d|      }|S |d}t        j                  |d      }t        j                   d| ||||||||
      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	
      S # t        j                  $ r Y w xY w)a  Sparse update entries in '*var' and '*accum' according to FOBOS algorithm.

  That is for rows we have grad for, we update var and accum as follows:
  accum += grad * grad
  prox_v = var
  prox_v -= lr * grad * (1 / sqrt(accum))
  var = sign(prox_v)/(1+lr*l2) * max{|prox_v|-lr*l1,0}

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    accum: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Learning rate. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `lr`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `lr`.
      L2 regularization. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  "ResourceSparseApplyProximalAdagradr   Nr   F	r   rD   r   r[   r\   r   rx  r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   5resource_sparse_apply_proximal_adagrad_eager_fallbackr   r*   r+   r,   r-   )r   rD   r   r[   r\   r   rx  r   r#   r3   r4   r8   r   r5   r6   r7   s                   r;   &resource_sparse_apply_proximal_adagradr    s<   : 
			0h..0$#\\112D#ub"
D'=+7g n K"";>+'88,#Ur136=:E379!QX 
*) && -
##At,,## 
B
ub"b$[  ## 
r	  z*raw_ops.ResourceSparseApplyProximalAdagradc
                     |d}t        j                  |d      }t        j                  ||||g|	t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }
}|\  }}}}t        j                  |g|	t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }| ||||||g}d|
d|d|f}t        j4                  dd|||	|      }d }|S )NFr   r$   r  s"   ResourceSparseApplyProximalAdagradr   r   r   )r   rD   r   r[   r\   r   rx  r   r#   r@   r  r  r  r:   r9   r8   s                   r;   r  r  L  s~   K"";>+66BD7I3QXQ`Q`bibqbqsz  tA  tA  CJ  CP  CP  RY  R_  R_  ah  am  am  ov  o@  o@  BI  BO  BO  QX  Q^  Q^  `g  `n  `n  pw  p~  p~  @G  @P  @P  RY  R`  R`  bi  bq  bq  sz  sA  sA  CJ  CU  CU  W^  Wc  Wc  el  es  es  u|  uC  uC  QF  G'9 2r2t'>>y#PWP]P]_f_l_lOop.*7sG$4$45#

 
 (8(8
9%ub"b$8,*nm&BA$0C"&(' '	.r=   /TV_ResourceSparseApplyProximalGradientDescent_T6TV_ResourceSparseApplyProximalGradientDescent_Tindicesc                 0   t         j                   xs t        j                         }|j                  }	|	j                  r"	 t	        j
                  |d|| |||||d|      }
|
S |d}t        j                  |d      }t        j                   d| |||||||	      \  }}}}|S # t        j                  $ r }t        j                  ||       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | ||||||||	      S # t        j                  $ r Y w xY w)a  Sparse update '*var' as FOBOS algorithm with fixed learning rate.

  That is for rows we have grad for, we update var as follows:
  prox_v = var - alpha * grad
  var = sign(prox_v)/(1+alpha*l2) * max{|prox_v|-alpha*l1,0}

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    alpha: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `alpha`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `alpha`.
      L2 regularization. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `alpha`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, the subtraction will be protected by a lock;
      otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  *ResourceSparseApplyProximalGradientDescentr   Nr   Fr   rz   r[   r\   r   rx  r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   >resource_sparse_apply_proximal_gradient_descent_eager_fallbackr   r*   r+   r,   r-   )r   rz   r[   r\   r   rx  r   r#   r3   r4   r8   r   r5   r6   r7   s                  r;   /resource_sparse_apply_proximal_gradient_descentr  b  s7   4 
			0h..0$#\\11:D#u
Bg}k;g n K"";>+'884#U9;>EBM;?A!QX 
*) && -
##At,,## 
K
ub"dG  ## 
rh  z2raw_ops.ResourceSparseApplyProximalGradientDescentc	                    |d}t        j                  |d      }t        j                  ||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }	}
|
\  }}}}t        j                  |g|t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } | |||||g}d|	d|d|f}t        j4                  dd||||      }d }|S )NFr   r$   r  s*   ResourceSparseApplyProximalGradientDescentr   r   r   )r   rz   r[   r\   r   rx  r   r#   r@   r  r  r  r:   r9   r8   s                  r;   r  r    sd   K"";>+66r2t7LcT[TcTceletetv}  wD  wD  FM  FS  FS  U\  Ub  Ub  dk  dp  dp  ry  rC  rC  EL  ER  ER  T[  Ta  Ta  cj  cq  cq  sz  sA  sA  CJ  CS  CS  U\  Uc  Uc  el  et  et  v}  vD  vD  FM  FX  FX  Za  Zf  Zf  ho  hv  hv  x  xF  xF  TI  J'9#5"b$'>>y#PWP]P]_f_l_lOop.*7sG$4$45#ub"dG4,*nm&JA$0C"&(' '	.r=   TV_ResourceSparseApplyRMSProp_T&TV_ResourceSparseApplyRMSProp_Tindicesc                 B   t         j                   xs t        j                         }|j                  }|j                  r%	 t	        j
                  |d|
| ||||||||d|	      }|S |	d}	t        j                  |	d      }	t        j                   d| |||||||||	|
      \  }}}}|S # t        j                  $ r }t        j                  ||
       Y d}~nd}~wt        j                  $ r Y nw xY w	 t        | |||||||||	|
|      S # t        j                  $ r Y w xY w)a
  Update '*var' according to the RMSProp algorithm.

  Note that in dense implementation of this algorithm, ms and mom will
  update even if the grad is zero, but in this sparse implementation, ms
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  Delta = learning_rate * gradient / sqrt(mean_square + epsilon)

  ms <- rho * ms_{t-1} + (1-rho) * grad * grad
  mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)
  var <- var - mom

  Args:
    var: A `Tensor` of type `resource`. Should be from a Variable().
    ms: A `Tensor` of type `resource`. Should be from a Variable().
    mom: A `Tensor` of type `resource`. Should be from a Variable().
    lr: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `lr`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `lr`.
    epsilon: A `Tensor`. Must have the same type as `lr`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `lr`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var, ms and mom.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, ms, and mom tensors is protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    The created Operation.
  ResourceSparseApplyRMSPropr   Nr   Fr   r   r   r   rF   r   r   r   rx  r   r#   )r&   r   r'   r(   r   r   r   r   r   r   r   -resource_sparse_apply_rms_prop_eager_fallbackr   r*   r+   r,   r-   )r   r   r   r   rF   r   r   r   rx  r   r#   r3   r4   r8   r   r5   r6   r7   s                     r;   resource_sparse_apply_rms_propr    sJ   J 
			0h..0$#\\11*D#r3C'4-Fg n K"";>+'88$#"#"#/7+/2=D	J!QX
 
*' && -
##At,,## 
:
r3C7D'!$8 8 ## 
r   z"raw_ops.ResourceSparseApplyRMSPropc                 p   |	d}	t        j                  |	d      }	t        j                  |||||g|t        j                  t        j
                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                  t        j                   t        j"                  t        j$                  t        j&                  t        j(                  t        j*                  t        j,                  g      \  }}|\  }}}}}t        j                  |g|t        j                  t        j                  g      \  }\  }t/        j0                  | t        j2                        } t/        j0                  |t        j2                        }t/        j0                  |t        j2                        }| ||||||||g	}d|d|d|	f}t        j4                  dd||||
      }d }|S )NFr   r$   r  s   ResourceSparseApplyRMSPropr   r   r   )r   r   r   r   rF   r   r   r   rx  r   r#   r@   r  r  r  r:   r9   r8   s                     r;   r  r    s   K"";>+66C7TX7Y[^ahapapry  sB  sB  DK  DQ  DQ  SZ  S`  S`  bi  bo  bo  qx  q}  q}  F  P  P  RY  R_  R_  ah  an  an  pw  p~  p~  @G  @N  @N  PW  P`  P`  bi  bp  bp  ry  rA  rA  CJ  CQ  CQ  SZ  Se  Se  gn  gs  gs  u|  uC  uC  EL  ES  ES  aV  W'9'0$2sHgt'>>y#PWP]P]_f_l_lOop.*7sG$4$45#b'"2"23"sG$4$45#r3C7D'J,*nm&:A$0C"&(' '	.r=   TV_SparseApplyAdadelta_TTV_SparseApplyAdadelta_Tindicesc
                    t         j                   xs t        j                         }
|
j                  }|j                  rt	        d      |d}t        j                  |d      }t        j                  d| |||||||||	      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a?  var: Should be from a Variable().

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    accum_update: A mutable `Tensor`. Must have the same type as `var`.
      : Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Learning rate. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `var`.
      Decay factor. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Nsparse_apply_adadelta op does not support eager execution. Arg 'out' is a ref.NFr   SparseApplyAdadeltar{  r$   r  r%   )r   rD   rE   r   rF   r   r   rx  r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                     r;   sparse_apply_adadeltar    s   4 
			0h..0$#\\
g
hhK"";>+'883e,8RS'.T7+6T	C!QX
 QK'""$3%%c*J  ,m  /1F ::L|VW>('	.r=   zraw_ops.SparseApplyAdadeltac                     t        d      )Nr  r?   )r   rD   rE   r   rF   r   r   rx  r   r#   r@   s              r;   $sparse_apply_adadelta_eager_fallbackr  A      effr=   TV_SparseApplyAdagrad_TTV_SparseApplyAdagrad_Tindicesc                 T   t         j                   xs t        j                         }|j                  }	|	j                  rt	        d      |d}t        j                  |d      }|d}t        j                  |d      }t        j                  d| |||||||	      \  }
}
}}|dd }t        j                         rjd	|j                  d	      d
|j                  d
      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a4  Update relevant entries in '*var' and '*accum' according to the adagrad scheme.

  That is for rows we have grad for, we update var and accum as follows:
  $$accum += grad * grad$$
  $$var -= lr * grad * (1 / sqrt(accum))$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Learning rate. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Msparse_apply_adagrad op does not support eager execution. Arg 'out' is a ref.NFr   TrP   SparseApplyAdagradr  r$   r  r%   )r   rD   r   r   rx  r   rP   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                   r;   sparse_apply_adagradr  G  s0   6 
			0h..0$#\\
f
ggK"";>+L##L.A,'88#Ur&-;+7dD!QX QK'""$3%%c*J  ,m  /  02F ::LlFG=('	.r=   zraw_ops.SparseApplyAdagradc	                     t        d      )Nr  r?   )	r   rD   r   r   rx  r   rP   r#   r@   s	            r;   #sparse_apply_adagrad_eager_fallbackr        deer=   TV_SparseApplyAdagradDA_T TV_SparseApplyAdagradDA_Tindicesc                    t         j                   xs t        j                         }|j                  }|j                  rt	        d      |	d}	t        j                  |	d      }	t        j                  d| |||||||||	|
      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a	  Update entries in '*var' and '*accum' according to the proximal adagrad scheme.

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    gradient_accumulator: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    gradient_squared_accumulator: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    lr: A `Tensor`. Must have the same type as `var`.
      Learning rate. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    global_step: A `Tensor` of type `int64`.
      Training step number. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Psparse_apply_adagrad_da op does not support eager execution. Arg 'out' is a ref.NFr   SparseApplyAdagradDAr  r$   r  r%   )r   rY   rZ   r   rx  r   r[   r\   r]   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                      r;   sparse_apply_adagrad_dar    s   : 
			0h..0$#\\
i
jjK"";>+'88C5I=Y%)7rb#%;,7dD!QX QK'""$3%%c*J  ,m  /1F ::Lfg?('	.r=   zraw_ops.SparseApplyAdagradDAc                     t        d      )Nr  r?   )r   rY   rZ   r   rx  r   r[   r\   r]   r   r#   r@   s               r;   &sparse_apply_adagrad_da_eager_fallbackr    r   r=   TV_SparseApplyAdagradV2_T TV_SparseApplyAdagradV2_Tindicesc	                 V   t         j                   xs t        j                         }	|	j                  }
|
j                  rt	        d      |d}t        j                  |d      }|d}t        j                  |d      }t        j                  d| ||||||||
      \  }}}}|dd }t        j                         rjd	|j                  d	      d
|j                  d
      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )a  Update relevant entries in '*var' and '*accum' according to the adagrad scheme.

  That is for rows we have grad for, we update var and accum as follows:
  $$accum += grad * grad$$
  $$var -= lr * grad * (1 / sqrt(accum))$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Learning rate. Must be a scalar.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Constant factor. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    update_slots: An optional `bool`. Defaults to `True`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Psparse_apply_adagrad_v2 op does not support eager execution. Arg 'out' is a ref.NFr   TrP   SparseApplyAdagradV2r  r$   r  r%   )r   rD   r   r   r   rx  r   rP   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                    r;   sparse_apply_adagrad_v2r    s3   : 
			0h..0$#\\
i
jjK"";>+L##L.A,'88CuW%)7,7-9	F!QX
 QK'""$3%%c*J  ,m  /  02F ::Lfg?('	.r=   zraw_ops.SparseApplyAdagradV2c
                     t        d      )Nr  r?   )
r   rD   r   r   r   rx  r   rP   r#   r@   s
             r;   &sparse_apply_adagrad_v2_eager_fallbackr    r   r=   TV_SparseApplyCenteredRMSProp_T&TV_SparseApplyCenteredRMSProp_Tindicesc                    t         j                   xs t        j                         }|j                  }|j                  rt	        d      |
d}
t        j                  |
d      }
t        j                  d| |||||||||	|
|      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	av	  Update '*var' according to the centered RMSProp algorithm.

  The centered RMSProp algorithm uses an estimate of the centered second moment
  (i.e., the variance) for normalization, as opposed to regular RMSProp, which
  uses the (uncentered) second moment. This often helps with training, but is
  slightly more expensive in terms of computation and memory.

  Note that in dense implementation of this algorithm, mg, ms, and mom will
  update even if the grad is zero, but in this sparse implementation, mg, ms,
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  mean_grad = decay * mean_grad + (1-decay) * gradient
  Delta = learning_rate * gradient / sqrt(mean_square + epsilon - mean_grad ** 2)

  $$ms <- rho * ms_{t-1} + (1-rho) * grad * grad$$
  $$mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)$$
  $$var <- var - mom$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    mg: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    ms: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    mom: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `var`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `var`.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var, ms and mom.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, mg, ms, and mom tensors is
      protected by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Wsparse_apply_centered_rms_prop op does not support eager execution. Arg 'out' is a ref.NFr   SparseApplyCenteredRMSPropr  r$   r  r%   )r   r   r   r   r   rF   r   r   r   rx  r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                       r;   sparse_apply_centered_rms_propr    s   ` 
			0h..0$#\\
p
qqK"";>+'88$#"*-.5D.52=DJ!QX QK'""$3%%c*J  ,m  /1F ::L$lFGE('	.r=   z"raw_ops.SparseApplyCenteredRMSPropc                     t        d      )Nr   r?   )r   r   r   r   r   rF   r   r   r   rx  r   r#   r@   s                r;   -sparse_apply_centered_rms_prop_eager_fallbackr  T  s    noor=   TV_SparseApplyFtrl_TTV_SparseApplyFtrl_Tindicesc                 \   t         j                   xs t        j                         }|j                  }|j                  rt	        d      |	d}	t        j                  |	d      }	|
d}
t        j                  |
d      }
t        j                  d| |||||||||	|
|      \  }}}}|dd }t        j                         rjd|j                  d      d	|j                  d	      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )
a  Update relevant entries in '*var' according to the Ftrl-proximal scheme.

  That is for rows we have grad for, we update var, accum and linear as follows:
  $$accum_new = accum + grad * grad$$
  $$linear += grad + (accum_{new}^{-lr_{power}} - accum^{-lr_{power}} / lr * var$$
  $$quadratic = 1.0 / (accum_{new}^{lr_{power}} * lr) + 2 * l2$$
  $$var = (sign(linear) * l1 - linear) / quadratic\ if\ |linear| > l1\ else\ 0.0$$
  $$accum = accum_{new}$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    linear: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    lr_power: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Jsparse_apply_ftrl op does not support eager execution. Arg 'out' is a ref.NFr   r   SparseApplyFtrlr  r$   r  r%   )r   rD   r   r   rx  r   r[   r\   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                       r;   sparse_apply_ftrlr
  Z  s@   L 
			0h..0$#\\
c
ddK"";>+"!",,-BD[\'88s%T#*rbR$,+1F $&!QX QK'""$3%%c*J  ,m  /1H  !89;F ::L<:('	.r=   zraw_ops.SparseApplyFtrlc                     t        d      )Nr  r?   )r   rD   r   r   rx  r   r[   r\   r   r   r   r#   r@   s                r;    sparse_apply_ftrl_eager_fallbackr    s    abbr=   TV_SparseApplyFtrlV2_TTV_SparseApplyFtrlV2_Tindicesc                 ^   t         j                   xs t        j                         }|j                  }|j                  rt	        d      |
d}
t        j                  |
d      }
|d}t        j                  |d      }t        j                  d| |||||||||	|
||      \  }}}}|dd }t        j                         rjd|j                  d      d	|j                  d	      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )
a  Update relevant entries in '*var' according to the Ftrl-proximal scheme.

  That is for rows we have grad for, we update var, accum and linear as follows:
  grad_with_shrinkage = grad + 2 * l2_shrinkage * var
  accum_new = accum + grad * grad
  linear += grad_with_shrinkage -
      (accum_new^(-lr_power) - accum^(-lr_power)) / lr * var
  quadratic = 1.0 / (accum_new^(lr_power) * lr) + 2 * l2
  var = (sign(linear) * l1 - linear) / quadratic if |linear| > l1 else 0.0
  accum = accum_new

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    linear: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 shrinkage regularization. Must be a scalar.
    l2_shrinkage: A `Tensor`. Must have the same type as `var`.
    lr_power: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    multiply_linear_by_lr: An optional `bool`. Defaults to `False`.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Msparse_apply_ftrl_v2 op does not support eager execution. Arg 'out' is a ref.NFr   r   SparseApplyFtrlV2r  r$   r  r%   )r   rD   r   r   rx  r   r[   r\   r   r   r   r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                        r;   sparse_apply_ftrl_v2r    sC   R 
			0h..0$#\\
f
ggK"";>+"!",,-BD[\'88E&t%,r*6)43H"&(!QX QK'""$3%%c*J  ,m  /1H  !89;F ::L\67<('	.r=   zraw_ops.SparseApplyFtrlV2c                     t        d      )Nr  r?   )r   rD   r   r   rx  r   r[   r\   r   r   r   r   r#   r@   s                 r;   #sparse_apply_ftrl_v2_eager_fallbackr    r  r=   TV_SparseApplyMomentum_TTV_SparseApplyMomentum_Tindicesc	                 V   t         j                   xs t        j                         }	|	j                  }
|
j                  rt	        d      |d}t        j                  |d      }|d}t        j                  |d      }t        j                  d| ||||||||
      \  }}}}|dd }t        j                         rjd|j                  d      d	|j                  d	      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )
az  Update relevant entries in '*var' and '*accum' according to the momentum scheme.

  Set use_nesterov = True if you want to use Nesterov momentum.

  That is for rows we have grad for, we update var and accum as follows:

  $$accum = accum * momentum + grad$$
  $$var -= lr * accum$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Learning rate. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    momentum: A `Tensor`. Must have the same type as `var`.
      Momentum. Must be a scalar.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var and accum tensors will be protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    use_nesterov: An optional `bool`. Defaults to `False`.
      If `True`, the tensor passed to compute grad will be
      var - lr * momentum * accum, so in the end, the var you get is actually
      var - lr * momentum * accum.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Nsparse_apply_momentum op does not support eager execution. Arg 'out' is a ref.NFr   rp   SparseApplyMomentumr  r$   r  r%   )r   rD   r   r   rx  r   r   rp   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                    r;   sparse_apply_momentumr    s4   F 
			0h..0$#\\
g
hhK"";>+L##L.A,'883e'.+6,8t	E!QX
 QK'""$3%%c*J  ,m  /  02F ::L|VW>('	.r=   zraw_ops.SparseApplyMomentumc
                     t        d      )Nr  r?   )
r   rD   r   r   rx  r   r   rp   r#   r@   s
             r;   $sparse_apply_momentum_eager_fallbackr  8  r  r=   TV_SparseApplyProximalAdagrad_T&TV_SparseApplyProximalAdagrad_Tindicesc	                     t         j                   xs t        j                         }	|	j                  }
|
j                  rt	        d      |d}t        j                  |d      }t        j                  d| ||||||||
      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a   Sparse update entries in '*var' and '*accum' according to FOBOS algorithm.

  That is for rows we have grad for, we update var and accum as follows:
  $$accum += grad * grad$$
  $$prox_v = var$$
  $$prox_v -= lr * grad * (1 / sqrt(accum))$$
  $$var = sign(prox_v)/(1+lr*l2) * max{|prox_v|-lr*l1,0}$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    accum: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Learning rate. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, updating of the var and accum tensors will be protected by
      a lock; otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Vsparse_apply_proximal_adagrad op does not support eager execution. Arg 'out' is a ref.NFr   SparseApplyProximalAdagradr  r$   r  r%   )r   rD   r   r[   r\   r   rx  r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                    r;   sparse_apply_proximal_adagradr"  >  s   > 
			0h..0$#\\
o
ppK"";>+'88$#Urb)+$2=DJ!QX QK'""$3%%c*J  ,m  /1F ::L$lFGE('	.r=   z"raw_ops.SparseApplyProximalAdagradc
                     t        d      )Nr   r?   )
r   rD   r   r[   r\   r   rx  r   r#   r@   s
             r;   ,sparse_apply_proximal_adagrad_eager_fallbackr$  w  s    mnnr=   'TV_SparseApplyProximalGradientDescent_T.TV_SparseApplyProximalGradientDescent_Tindicesc                    t         j                   xs t        j                         }|j                  }	|	j                  rt	        d      |d}t        j                  |d      }t        j                  d| |||||||	      \  }
}
}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	aC  Sparse update '*var' as FOBOS algorithm with fixed learning rate.

  That is for rows we have grad for, we update var as follows:
  $$prox_v = var - alpha * grad$$
  $$var = sign(prox_v)/(1+alpha*l2) * max{|prox_v|-alpha*l1,0}$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    alpha: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    l1: A `Tensor`. Must have the same type as `var`.
      L1 regularization. Must be a scalar.
    l2: A `Tensor`. Must have the same type as `var`.
      L2 regularization. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var and accum.
    use_locking: An optional `bool`. Defaults to `False`.
      If True, the subtraction will be protected by a lock;
      otherwise the behavior is undefined, but may exhibit less contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  _sparse_apply_proximal_gradient_descent op does not support eager execution. Arg 'out' is a ref.NFr   "SparseApplyProximalGradientDescentr  r$   r  r%   )r   rz   r[   r\   r   rx  r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                   r;   &sparse_apply_proximal_gradient_descentr*  }  s   6 
			0h..0$#\\
x
yyK"";>+'88,#Ur13$6=:E379!QX QK'""$3%%c*J  ,m  /1F ::L,lFGM('	.r=   z*raw_ops.SparseApplyProximalGradientDescentc	                     t        d      )Nr(  r?   )	r   rz   r[   r\   r   rx  r   r#   r@   s	            r;   5sparse_apply_proximal_gradient_descent_eager_fallbackr,    s    vwwr=   TV_SparseApplyRMSProp_TTV_SparseApplyRMSProp_Tindicesc                    t         j                   xs t        j                         }|j                  }|j                  rt	        d      |	d}	t        j                  |	d      }	t        j                  d| |||||||||	|
      \  }}}}|dd }t        j                         rYd|j                  d      d|j                  d      d|j                  d      f}|j                  }t        j                  d|||       |\  }|S )	a  Update '*var' according to the RMSProp algorithm.

  Note that in dense implementation of this algorithm, ms and mom will
  update even if the grad is zero, but in this sparse implementation, ms
  and mom will not update in iterations during which the grad is zero.

  mean_square = decay * mean_square + (1-decay) * gradient ** 2
  Delta = learning_rate * gradient / sqrt(mean_square + epsilon)

  $$ms <- rho * ms_{t-1} + (1-rho) * grad * grad$$
  $$mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)$$
  $$var <- var - mom$$

  Args:
    var: A mutable `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `uint8`, `int16`, `int8`, `complex64`, `int64`, `qint8`, `quint8`, `qint32`, `bfloat16`, `qint16`, `quint16`, `uint16`, `complex128`, `half`, `uint32`, `uint64`.
      Should be from a Variable().
    ms: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    mom: A mutable `Tensor`. Must have the same type as `var`.
      Should be from a Variable().
    lr: A `Tensor`. Must have the same type as `var`.
      Scaling factor. Must be a scalar.
    rho: A `Tensor`. Must have the same type as `var`.
      Decay rate. Must be a scalar.
    momentum: A `Tensor`. Must have the same type as `var`.
    epsilon: A `Tensor`. Must have the same type as `var`.
      Ridge term. Must be a scalar.
    grad: A `Tensor`. Must have the same type as `var`. The gradient.
    indices: A `Tensor`. Must be one of the following types: `int32`, `int64`.
      A vector of indices into the first dimension of var, ms and mom.
    use_locking: An optional `bool`. Defaults to `False`.
      If `True`, updating of the var, ms, and mom tensors is protected
      by a lock; otherwise the behavior is undefined, but may exhibit less
      contention.
    name: A name for the operation (optional).

  Returns:
    A mutable `Tensor`. Has the same type as `var`.
  Nsparse_apply_rms_prop op does not support eager execution. Arg 'out' is a ref.NFr   SparseApplyRMSPropr  r$   r  r%   )r   r   r   r   rF   r   r   r   rx  r   r#   r3   r4   r5   r6   r7   r8   r9   r:   s                      r;   sparse_apply_rms_propr2    s
   P 
			0h..0$#\\
g
hhK"";>+'88#"#"#'/t&-;#'	)!QX
 QK'""$3%%c*J  ,m  /1F ::LlFG=('	.r=   zraw_ops.SparseApplyRMSPropc                     t        d      )Nr0  r?   )r   r   r   r   rF   r   r   r   rx  r   r#   r@   s               r;   $sparse_apply_rms_prop_eager_fallbackr4    r  r=   )FN)FTN)FFN(#  __doc__collectionstensorflow.pythonr   tensorflow.python.eagerr   r&   r   r   r   r*   tensorflow.python.frameworkr   r   tensorflow.security.fuzzing.pyr   _atypesr	   _op_def_registryr
   r   r   r,   "tensorflow.python.util.deprecationr   tensorflow.python.utilr   	_dispatch tensorflow.python.util.tf_exportr   typingr   r   r   typing_extensionsr   r   boolr<   	to_raw_opr!   rA   rC   rK   rI   rM   rO   rU   rS   rW   rX   Int64rb   r`   rd   rf   rk   ri   rm   rn   ru   rs   rw   ry   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   r   Resourcer   r   r   r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r   r%  r"  r$  r(  r-  r+  r,  r/  r3  r1  r2  r5  r9  r7  r8  r;  r@  r=  r?  rB  rF  rD  rE  rI  rM  rK  rL  rO  rT  rQ  rR  rW  r[  rY  rZ  r]  ra  r_  r`  rc  rg  re  rf  rj  rn  rl  rm  rp  rt  rr  rs  rv  rw  r}  rz  r|  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r
  r	  r  r  r  r  r  r  r  r  r  r  r  r  r  r"  r!  r$  r%  r&  r*  r)  r,  r-  r.  r2  r1  r4   r=   r;   <module>rH     s6  
  6 7 1 7 9 F K 3 I C 8 6 % % '-/ACWYln  BS  Uc  et  vE  GV  Xf  hx  zJ  L[  ]n  p@  BR  Td  fv  xG  H 6y&6!67 6IcK[F[<\ 6ajkn  qA  lA  bB 6  QZ  [^  `p  [p  Qq 6  w@  AD  FV  AV  wW 6  `i  jm  o  j  `@ 6  IR  SV  Xh  Sh  Ii 6  t}  ~A  CS  ~S  tT 6  \e  fi  k{  f{  \| 6  JN 6  dm  nq  sC  nC  dD 6p /i-.~t~~m/LM_i5E0E&F _9UXZjUjKk _pyz}  @P  {P  qQ _  `i  jm  o  j  `@ _  FO  PS  Ue  Pe  Ff _  ox  y|  ~N  yN  oO _  Xa  be  gw  bw  Xx _  CL  MP  Rb  Mb  Cc _  kt  ux  zJ  uJ  kK _  Z^ _  nw  x{  }M  xM  nN _ 13EG[]p  sD  FW  Yg  ix  zI  KZ  \j  l|  ~N  P_  ar  tD  FV  Xh  jz  |K  L 2	#'9"9: 29SRdMdCe 2u~  @C  EW  @W  vX 2  ^g  hk  m  h  ^@ 2  GP  QT  Vh  Qh  Gi 2  t}  ~A  CU  ~U  tV 2  ^g  hk  m  h  ^@ 2  NR 2  hq  ru  wI  rI  hJ 2h 3	12>4>>.3QR`y6H1H'I `R[\_as\sRt `  EN  OR  Tf  Of  Eg `  mv  wz  |N  wN  mO `  V_  `c  ew  `w  Vx `  CL  MP  Rd  Md  Ce `  mv  wz  |N  wN  mO `  ^b `  r{  |  AS  |S  rT ` /1CEY[n  qB  DU  We  gv  xG  IX  Zh  jz  |L  N]  _p  rB  DT  Vf  hx  zI  J 0y&7!78 03PaKaAb 0hqru  xI  sI  iJ 0  R[  \_  ar  \r  Rs 0  AE 0  Z^ 0  s|  }@  BS  }S  sT 0d 1y/01NO_i5F0F&G _PYZ]_pZpPq _  xA  BE  GX  BX  xY _  aj  kn  pA  kA  aB _  QU _  ei _  yB  CF  HY  CY  yZ _ 35GI]_r  uF  HY  [i  kz  |K  M\  ^l  n~  @P  Ra  ct  vF  HX  Zj  l|  ~M  N 1)C)<$<= 1U^_bdw_wUx 1  Yb  cf  h{  c{  Y| 1  DM  NQ  Sf  Nf  Dg 1  mv  wz  |O  wO  mP 1  V_  `c  ex  `x  Vy 1  H  IL  Na  Ia  b 1  qz  {~  @G  @M  @M  {M  qN 1  \` 1  v  @C  EX  @X  vY 1f 534^T^^DT5UVb38K3K)L bdmnq  tG  oG  eH b  hq  ru  wJ  rJ  hK b  S\  ]`  bu  ]u  Sv b  |E  FI  K^  F^  |_ b  en  or  tG  oG  eH b  NW  X[  ]p  Xp  Nq b  @I  JM  OV  O\  O\  J\  @] b  lp b  @I  JM  Ob  Jb  @c b 35GI]_r  uF  HY  [i  kz  |K  M\  ^l  n~  @P  Ra  ct  vF  HX  Zj  l|  ~M  N 2)C)<$<= 2iPSUhPhFi 2oxy|  R  zR  pS 2  ^g  hk  m@  h@  ^A 2  IR  SV  Xk  Sk  Il 2  z~ 2  SW 2  lu  vy  {N  vN  lO 2h 534^T^^DT5UVb38K3K)L bU^_bdw_wUx b  H  IL  Na  Ia  b b  mv  wz  |O  wO  mP b  Xa  be  gz  bz  X{ b  JN b  ^b b  r{  |  AT  |T  rU b )+=?SUhj{  ~O  Q_  ap  rA  CR  Tb  dt  vF  HW  Yj  l|  ~N  P`  br  tC  D@Ic>12 @ynAT7U @ZcdgiwdwZx @  HQ  RU  We  Re  Hf @  u~  B  DR  R  uS @  Yb  cf  hv  cv  Yw @  @I  JM  O]  J]  @^ @  gp  qt  vD  qD  gE @  PY  Z]  _m  Zm  Pn @  v  @C  ES  @S  vT @  bf @  { @  U^  _b  dr  _r  Us @D +I)*>4>>*+EF	\9S.-@#A \iPSUcPcFd \irsv  yG  tG  jH \  W`  ad  ft  at  Wu \  DM  NQ  Sa  Na  Db \  hq  ru  wE  rE  hF \  OX  Y\  ^l  Yl  Om \  v  @C  ES  @S  vT \  _h  il  n|  i|  _} \  EN  OR  Tb  Ob  Ec \  rv \  FJ \  Zc  dg  iw  dw  Zx \ /1CEY[n  qB  DU  We  gv  xG  IX  Zh  jz  |L  N]  _p  rB  DT  Vf  hx  zI  J 0	#'8"89 0iM^H^>_ 0enor  uF  pF  fG 0  PY  Z]  _p  Zp  Pq 0  H  IL  N_  I_  ` 0  hq  ru  wH  rH  hI 0  QZ  [^  `q  [q  Qr 0  @D 0  Zc  dg  iz  dz  Z{ 0d 1y/01OP`y6G1G'H `YWZ\mWmMn `t}  B  DU  U  uV `  _h  il  n  i  _@ `  NW  X[  ]n  Xn  No `  w@  AD  FW  AW  wX `  `i  jm  o@  j@  `A `  PT `  dm  nq  sD  nD  dE ` $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z E30I+I!J EPYZ]_xZxPy E  @I  JM  Oh  Jh  @i E  py  z}  X  zX  pY E  _h  il  nG  iG  _H E  OX  Y\  ^w  Yw  Ox E  DM  NQ  Sl  Nl  Dm E  xA  BE  G`  B`  xa E  ir  sv  xQ  sQ  iR E  `d E  zC  DG  Ib  Db  zc EN Ay!?@PgAhi i	#?X:X0Y i_hil  oH  jH  `I i  OX  Y\  ^w  Yw  Ox i  H  IL  Ng  Ig  h i  nw  x{  }V  xV  nW i  ^g  hk  mF  hF  ^G i  S\  ]`  b{  ]{  S| i  GP  QT  Vo  Qo  Gp i  xA  BE  G`  B`  xa i  pt i  DM  NQ  Sl  Nl  Dm i )+=?SUhj{  ~O  Q_  ap  rA  CR  Tb  dt  vF  HW  Yj  l|  ~N  P`  br  tC  D;Ic>12 ;9S.EX;Y ;clmp  sA  nA  dB ;  JS  TW  Yg  Tg  Jh ;  nw  x{  }K  xK  nL ;  R[  \_  ao  \o  Rp ;  v  @C  ES  @S  vT ;  `i  jm  o}  j}  `~ ;  LP ;  nr ;  HQ  RU  We  Re  Hf ;z +I)*>4>>*+EF	\9S.-@#A \)TWYgTgJh \r{|  BP  }P  sQ \  Yb  cf  hv  cv  Yw \  }F  GJ  LZ  GZ  }[ \  aj  kn  p~  k~  a \  EN  OR  Tb  Ob  Ec \  ox  y|  ~L  yL  oM \  \` \  y} \  MV  WZ  \j  Wj  Mk \ -/ACWYln  BS  Uc  et  vE  GV  Xf  hx  zJ  L[  ]n  p@  BR  Td  fv  xG  H ?y&6!67 ?	#O_J_@` ?jstw  zJ  uJ  kK ?  S\  ]`  br  ]r  Ss ?  yB  CF  HX  CX  yY ?  _h  il  n~  i~  _ ?  EN  OR  Td  Od  Ee ?  u~  B  DT  T  uU ?  aj  kn  p@  k@  aA ?  OS ?  qu ?  KT  UX  Zj  Uj  Kk ?B /i-.~t~~m/LM_i5E0E&F _yY\^nYnOo _  zC  DG  IY  DY  zZ _  bk  lo  qA  lA  bB _  HQ  RU  Wg  Rg  Hh _  nw  x{  }M  xM  nN _  T]  ^a  cs  ^s  Tt _  DM  NQ  Sc  Nc  Dd _  py  z}  O  zO  pP _  _c _  |@ _  PY  Z]  _o  Zo  Pp _ $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z $	#/H*H I $R[\_az\zR{ $  EN  OR  Tm  Om  En $  |@ $  V_  `c  e~  `~  V $L Ay!?@PfAgh hy>W9W/X hajkn  qJ  lJ  bK h  T]  ^a  c|  ^|  T} h  LP h  `i  jm  oH  jH  `I h 13EG[]p  sD  FW  Yg  ix  zI  KZ  \j  l|  ~N  P_  ar  tD  FV  Xh  jz  |K  L 7	#'9"9: 79SRdMdCe 7ktux  {M  vM  lN 7  V_  `c  ew  `w  Vx 7  DM  NQ  Se  Ne  Df 7  tx 7  MQ 7  gp  qt  vH  qH  gI 7r 3	12>4>>.3QR`y6H1H'I `R[\_as\sRt `  {D  EH  J\  E\  {] `  en  or  tF  oF  eG `  S\  ]`  bt  ]t  Su `  DH `  X\ `  lu  vy  {M  vM  lN ` 35GI]_r  uF  HY  [i  kz  |K  M\  ^l  n~  @P  Ra  ct  vF  HX  Zj  l|  ~M  N 0)C)<$<= 0)CQdLdBe 0ktux  {N  vN  lO 0  Zc  dg  i|  d|  Z} 0  KT  UX  Zm  Um  Kn 0  v  @C  EX  @X  vY 0  aj  kn  pC  kC  aD 0  RV 0  lu  vy  {N  vN  lO 0d 534^T^^DT5UVb38K3K)L bQZ[^`s[sQt b  {D  EH  J]  E]  {^ b  ir  sv  xK  sK  iL b  Zc  dg  i|  d|  Z} b  EN  OR  Tg  Og  Eh b  py  z}  R  zR  pS b  bf b  v  @C  EX  @X  vY b $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z .	#/H*H I .R[\_az\zR{ .  BK  LO  Qj  Lj  Bk .  qz  {~  @Y  {Y  qZ .  `i  jm  oH  jH  `I .  QZ  [^  `y  [y  Qz .  HL .  bk  lo  qJ  lJ  bK .` Ay!?@PfAgh hy>W9W/X hajkn  qJ  lJ  bK h  QZ  [^  `y  [y  Qz h  @I  JM  Oh  Jh  @i h  ox  y|  ~W  yW  oX h  `i  jm  oH  jH  `I h  X\ h  lu  vy  {T  vT  lU h %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !,38Y3Y)Z ,clmp  sT  nT  dU ,  [d  eh  jK  eK  [L ,  R[  \_  aB  \B  RC ,  LU  VY  [|  V|  L} ,  KO ,  en  or  tU  oU  eV ,\  Qy)OPQ_QUQ_Q_`  RA   B q	#GhBh8i qr{|  Bc  }c  sd q  js  tw  yZ  tZ  j[ q  aj  kn  pQ  kQ  aR q  [d  eh  jK  eK  [L q  [_ q  ox  y|  ~_  y_  o` q /1CEY[n  qB  DU  We  gv  xG  IX  Zh  jz  |L  N]  _p  rB  DT  Vf  hx  zI  J :	#'8"89 :yN_I_?` :gpqt  wH  rH  hI :  OX  Y\  ^o  Yo  Op :  w@  AD  FW  AW  wX :  dm  nq  sD  nD  dE :  PY  Z]  _p  Zp  Pq :  yB  CF  HY  CY  yZ :  hl :  BK  LO  Qb  Lb  Bc :x 1y/01OP`y6G1G'H `iX[]nXnNo `v  AD  FW  AW  wX `  ^g  hk  m~  h~  ^ `  FO  PS  Uf  Pf  Fg `  s|  }@  BS  }S  sT `  _h  il  n  i  _@ `  HQ  RU  Wh  Rh  Hi `  x| `  LU  VY  [l  Vl  Lm ` ##=?QSgi|  P  Rc  es  uD  FU  Wf  hv  xH  JZ  \k  m~  @P  Rb  dt  vF  HW  X :	#w/?/?*? @ :YsT[TdTdOdEe :jstw  zA  zJ  zJ  uJ  kK :  Zc  dg  iA  dA  ZB :  HQ  RU  Wo  Ro  Hp :  yB  CF  H`  C`  ya :  js  tw  yQ  tQ  jR :  ]f  gj  lD  gD  ]E :  MV  WZ  \t  Wt  Mu :  CG :v ?i =>~t~~Nd?ef yg>N>N9N/O T]^acjcscs^sTt   zC  DG  IP  IY  IY  DY  zZ   ir  sv  xP  sP  iQ   W`  ad  f~  a~  W   HQ  RU  Wo  Ro  Hp   yB  CF  H`  C`  ya   lu  vy  {S  vS  lT   \e  fi  kC  fC  \D   SW " %%ACUWk  nA  CT  Vg  iw  yH  JY  [j  lz  |L  N^  `o  qB  DT  Vf  hx  zJ  L[  \ 630@0@+@!A 6)TWY`YiYiTiJj 6  {D  EH  JQ  JZ  JZ  EZ  {[ 6  aj  kn  pJ  kJ  aK 6  R[  \_  a{  \{  R| 6  GP  QT  Vp  Qp  Gq 6  yB  CF  Hb  Cb  yc 6  qu 6n C	"AB>4>>RiCjk 	#w?O?O:O0P YbcfhohxhxcxYy   JS  TW  Y`  Yi  Yi  Ti  Jj   py  z}  Y  zY  pZ   aj  kn  pJ  kJ  aK   V_  `c  e  `  V@   HQ  RU  Wq  Rq  Hr   AE " $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z 3	#w/?/?*? @ 3SVX_XhXhShIi 3oxy|  X  zX  pY 3  aj  kn  pI  kI  aJ 3  X\ 3  qu 3h Ay!?@PfAgh yg>N>N9N/O XabegngwgwbwXx   H  IL  Ng  Ig  h   py  z}  X  zX  pY   hl   |@ ( &&CEWYm  pC  EV  Xi  ky  {J  L[  ]l  n|  ~N  P`  bq  sD  FV  Xh  jz  |L  N]  ^ 99S'2B2B-B#C 9[dehjqjzjzez[{ 9  \e  fi  kr  k{  k{  f{  \| 9  DM  NQ  Sn  Nn  Do 9  u~  B  D_  _  u` 9  fo  ps  uP  pP  fQ 9  W`  ad  fA  aA  WB 9  QZ  [^  `g  `m  `m  [m  Qn 9  |@ 9t E#CD^T^^TmEno )CAQAQ<Q2R jstw  zA  zJ  zJ  uJ  kK   kt  ux  zA  zJ  zJ  uJ  kK   S\  ]`  b}  ]}  S~   DM  NQ  Sn  Nn  Do   u~  B  D_  _  u`   fo  ps  uP  pP  fQ   `i  jm  ov  o|  o|  j|  `}   LP & &&CEWYm  pC  EV  Xi  ky  {J  L[  ]l  n|  ~N  P`  bq  sD  FV  Xh  jz  |L  N]  ^ 69S'2B2B-B#C 6IVY[b[k[kVkLl 6r{|  B]  }]  s^ 6  ir  sv  xS  sS  iT 6  \e  fi  kF  fF  \G 6  UY 6  nr 6n E#CD^T^^TmEno )CAQAQ<Q2R [dehjqjzjzez[{   BK  LO  Ql  Ll  Bm   xA  BE  Gb  Bb  xc   kt  ux  zU  uU  kV   ei   y} * !!9;MOcex  {L  N_  ao  q@  BQ  Sb  dr  tD  FV  Xg  iz  |L  N^  `p  rB  DS  T EYsG,<,<'<= E)CQXQaQaLaBb Egpqtv}  wG  wG  rG  hH E  W`  ad  f|  a|  W} E  LU  VY  [q  Vq  Lr E  xA  BE  G]  B]  x^ E  gp  qt  vL  qL  gM E  V_  `c  e{  `{  V| E  GP  QT  Vl  Ql  Gm E  u~  B  DZ  Z  u[ E  im E  BF EL ;I9:>4>>J];^_ Ic7;K;K6K,L QZ[^`g`p`p[pQq v  AD  FM  FV  FV  AV  wW   fo  ps  uK  pK  fL   [d  eh  j@  e@  [A   GP  QT  Vl  Ql  Gm   v  @C  E[  @[  v\   en  or  tJ  oJ  eK   V_  `c  e{  `{  V|   DM  NQ  Si  Ni  Dj   y}   MQ * %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !B)C9I9I4I*J ByY\^e^n^nYnOo Bt}  B  DK  DT  DT  T  uU B  ]f  gj  ls  l|  l|  g|  ]} B  LU  VY  [|  V|  L} B  LU  VY  [|  V|  L} B  CL  MP  Rs  Ms  Ct B  }F  GJ  Lm  Gm  }n B  w@  AD  Fg  Ag  wh B  s|  }@  Bc  }c  sd B  lu  vy  {\	  v\	  l]	 B  k	o	 BF  Qy)OPQ_QUQ_Q_  aA  RB   C 3HXHXCX9Y ^ghkmtm}m}h}^~   DM  NQ  SZ  Sc  Sc  Nc  Dd   lu  vy  {B  {K  {K  vK  lL   [d  eh  jK  eK  [L   [d  eh  jK  eK  [L   R[  \_  aB  \B  RC   LU  VY  [|  V|  L}   FO  PS  Uv  Pv  Fw   BK  LO  Qr  Lr  Bs   {D	  E	H	  J	k	  E	k	  {l	   {		 & $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z 330@0@+@!A 3iPSU\UeUePeFf 3luvy  |U  wU  mV 3  _h  il  nG  iG  _H 3  V_  `c  e~  `~  V 3  GP  QT  Vo  Qo  Gp 3  xA  BE  G`  B`  xa 3  os 3h Ay!?@PgAhi 	#w?O?O:O0P U^_bdkdtdt_tUu   |E  FI  Kd  Fd  |e   nw  x{  }V  xV  nW   en  or  tM  oM  eN   V_  `c  e~  `~  V   GP  QT  Vo  Qo  Gp   C   %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !H)C9I9I4I*J HPYZ]_f_o_oZoPp Hv  AD  FM  FV  FV  AV  wW H  ^g  hk  mt  m}  m}  h}  ^~ H  DM  NQ  St  Nt  Du H  |E  FI  Kl  Fl  |m H  yB  CF  Hi  Ci  yj H  u~  B  De  e  uf H  nw  x{  }^  x^  n_ H  mq HR  Qy)OPQ_QUQ_Q_  aA  RB   C 3HXHXCX9Y _hilnun~n~i~_   FO  PS  U\  Ue  Ue  Pe  Ff   mv  wz  |C  |L  |L  wL  mM   S\  ]`  bC  ]C  SD   KT  UX  Z{  U{  K|   HQ  RU  Wx  Rx  Hy   DM  NQ  St  Nt  Du   }F  GJ  Lm  Gm  }n   }A & !!9;MOcex  {L  N_  ao  q@  BQ  Sb  dr  tD  FV  Xg  iz  |L  N^  `p  rB  DS  T BYsG,<,<'<= BiPSU\UeUePeFf Bpyz}  @G  @P  @P  {P  qQ B  Yb  cf  h~  c~  Y B  EN  OR  Tj  Oj  Ek B  qz  {~  @V  {V  qW B  ]f  gj  lB  gB  ]C B  OX  Y\  ^t  Yt  Ou B  CG B  ei BF ;I9:>4>>J];^_ Ic7;K;K6K,L U^_bdkdtdt_tUu   @I  JM  OV  O_  O_  J_  @`   hq  ru  wM  rM  hN   T]  ^a  cy  ^y  Tz   @I  JM  Oe  Je  @f   lu  vy  {Q  vQ  lR   ^g  hk  mC  hC  ^D   SW   pt * ##=?QSgi|  P  Rc  es  uD  FU  Wf  hv  xH  JZ  \k  m~  @P  Rb  dt  vF  HW  X E	#w/?/?*? @ ESVX_XhXhShIi Es|  ~A  CJ  CS  CS  ~S  tT E  \e  fi  kC  fC  \D E  JS  TW  Yq  Tq  Jr E  xA  BE  G_  B_  x` E  fo  ps  uM  pM  fN E  ^g  hk  mE  hE  ^F E  R[  \_  ay  \y  Rz E  HL E  jn EL ?i =>~t~~Nd?ef yg>N>N9N/O XabegngwgwbwXx   CL  MP  RY  Rb  Rb  Mb  Cc   kt  ux  zR  uR  kS   Yb  cf  h@  c@  YA   GP  QT  Vn  Qn  Go   u~  B  D\  \  u]   mv  wz  |T  wT  mU   aj  kn  pH  kH  aI   X\   uy * %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !(38H8H3H)I (R[\_  bC  ]C  SD (  MV  WZ  \}  W}  M~ (  LP (R  Qy)OPQ_QUQ_Q_`  RA   B 	#wGWGWBW8X ajkn  qR  lR  bS   \e  fi  kL  fL  \M   \`   #**KM_au  xK  M^  `q  sA  CR  Tc  et  vD  FV  Xh  jy  {L  N^  `p  rB  DT  Ve  #f ;yg6F6F1F'G ;PYZ]_f_o_oZoPp ;v  AD  Fe  Ae  wf ;  nw  x{  }\  x\  n] ;  ir  sv  xW  sW  iX ;  fj ;  C ;x MY'KL^T^^\yMz{ iWEUEU@U6V _hilnun~n~i~_   FO  PS  Ut  Pt  Fu   }F  GJ  Lk  Gk  }l   xA  BE  Gf  Bf  xg   vz   JN * %%ACUWk  nA  CT  Vg  iw  yH  JY  [j  lz  |L  N^  `o  qB  DT  Vf  hx  zJ  L[  \ :30@0@+@!A :)TWY`YiYiTiJj :pyz}  @Z  {Z  q[ :  cl  mp  rL  mL  cM :  Yb  cf  hB  cB  YC :  QU :  jn :v C	"AB>4>>RiCjk 	#w?O?O:O0P YbcfhohxhxcxYy   @I  JM  Oi  Ji  @j   r{  |  A[  |[  r\   hq  ru  wQ  rQ  hR   ae   uy ( &&CEWYm  pC  EV  Xi  ky  {J  L[  ]l  n|  ~N  P`  bq  sD  FV  Xh  jz  |L  N]  ^ 39S'2B2B-B#C 3	RUW^WgWgRgHh 3nwx{  ~Y  yY  oZ 3  en  or  tO  oO  eP 3  ^g  hk  mH  hH  ^I 3  QZ  [^  `{  [{  Q| 3  DM  NQ  Sn  Nn  Do 3  }A 3h E#CD^T^^TmEno )CAQAQ<Q2R W`adfmfvfvavWw   ~G  HK  Mh  Hh  ~i   t}  ~A  C^  ~^  t_   mv  wz  |W  wW  mX   `i  jm  oJ  jJ  `K   S\  ]`  b}  ]}  S~   MQ " %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !338H8H3H)I 3R[\_ahaqaq\qRr 3  yB  CF  Hi  Ci  yj 3  py  z}  `  z`  pa 3  gp  qt  vW  qW  gX 3  `i  jm  oP  jP  `Q 3  _c 3h  Qy)OPQ_QUQ_Q_`  RA   B 	#wGWGWBW8X ajknpw  qA  qA  lA  bB   HQ  RU  Wx  Rx  Hy   H  IL  No  Io  p   v  @C  Ef  @f  vg   ox  y|  ~_  y_  o`   os " -44_as  vJ  L_  ar  tE  GU  Wf  hw  yH  JX  Zj  l|  ~M  O`  br  tD  FV  Xh  jy  -z )2)CAQAQ<Q2R 2[deh  kT  fT  \U 2  [d  eh  jS  eS  [T 2  Zc  dg  iR  dR  ZS 2  \e  fi  kT  fT  \U 2  cg 2f (ay1_'`aoaeaoao  qY  bZ  ([ $3PWP`P`K`Aa jstw  zc  uc  kd   js  tw  yb  tb  jc   ir  sv  xa  sa  ib   kt  ux  zc  uc  kd   sw   $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z =30@0@+@!A =yQTV]VfVfQfGg =nwx{  ~E  ~N  ~N  yN  oO =  U^  _b  d}  _}  U~ =  EN  OR  Tm  Om  En =  zC  DG  Ib  Db  zc =  nw  x{  }V  xV  nW =  _h  il  nG  iG  _H =  VZ =| Ay!?@PgAhi 	#w?O?O:O0P V_`celeueu`uVv   ~G  HK  MT  M]  M]  H]  ~^   dm  nq  sL  nL  dM   T]  ^a  c|  ^|  T}   IR  SV  Xq  Sq  Ir   }F  GJ  Le  Ge  }f   nw  x{  }V  xV  nW   fj " $++MOacw  zM  O`  bs  uC  ET  Ve  gv  xF  HX  Zj  l{  }N  P`  br  tD  FV  Xg  $h  *12[]ln}*~ '5	#w7G7G2G(H 5QZ[^`g`p`p[pQq 5  BK  LO  QX  Qa  Qa  La  Bb 5  hq  ru  wW  rW  hX 5  _h  il  nN  iN  _O 5  Zc  dg  iI  dI  ZJ 5  R[  \_  aA  \A  RB 5  MV  WZ  \C  WC  MD 5  RV 5l Oi(MN~t~~^|O}~ ygFVFVAV7W `ijmovooj  aA   QZ  [^  `g  `p  `p  [p  Qq   w@  AD  Ff  Af  wg   nw  x{  }]  x]  n^   ir  sv  xX  sX  iY   aj  kn  pP  kP  aQ   \e  fi  kR  fR  \S   bf ( #**KM_au  xK  M^  `q  sA  CR  Tc  et  vD  FV  Xh  jy  {L  N^  `p  rB  DT  Ve  #f )01Y[jl{)| &7yg6F6F1F'G 7PYZ]_f_o_oZoPp 7v  AD  Fe  Ae  wf 7  nw  x{  }\  x\  n] 7  hq  ru  w]  r]  h^ 7  lp 7  EI 7p MY'KL^T^^\yMz{ iWEUEU@U6V _hilnun~n~i~_   FO  PS  Ut  Pt  Fu   }F  GJ  Lk  Gk  }l   w@  AD  Fl  Al  wm   |@   PT , %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !+23]_np  ,A (<)C9I9I4I*J <bkloqx  rB  rB  mB  cC <  cl  mp  ry  rB  rB  mB  cC <  KT  UX  Z{  U{  K| <  GP  QT  V~  Q~  G <  EN  OR  Tu  Ou  Ev <  |E  FI  Kl  Fl  |m <  s|  }@  Bc  }c  sd <  s|  }@  BI  BO  BO  }O  sP <  ^b <z  Qy)OPQ_QUQ_Q_  aA  RB   C 3HXHXCX9Y qz{~  AH  AQ  AQ  |Q  rR   r{  |  AH  AQ  AQ  |Q  rR   Zc  dg  iJ  dJ  ZK   V_  `c  eM  `M  VN   T]  ^a  cD  ^D  TE   KT  UX  Z{  U{  K|   BK  LO  Qr  Lr  Bs   BK  LO  QX  Q^  Q^  L^  B_   nr * %,,OQcey  |O  Qb  du  wE  GV  Xg  ix  zH  JZ  \l  n}  P  Rb  dt  vF  HX  Zi  %j !+23]_np  ,A (;)C9I9I4I*J ;S\]`bibrbr]rSs ;  zC  DG  Ij  Dj  zk ;  v  @C  Ef  @f  vg ;  ox  y|  ~_  y_  o` ;  kt  ux  zb  ub  kc ;  qu ;  JN ;x  Qy)OPQ_QUQ_Q_  aA  RB   C 3HXHXCX9Y bkloqx  rB  rB  mB  cC   IR  SV  Xy  Sy  Iz   EN  OR  Tu  Ou  Ev   ~G  HK  Mn  Hn  ~o   zC  DG  Iq  Dq  zr   AE   UY , +22[]o  rF  H[  ]n  pA  CQ  Sb  ds  uD  FT  Vf  hx  zI  K\  ^n  p@  BR  Td  fu  +v '189ikz  }L  2M .J3@P@P;P1Q JW`adfmfvfvavWw J  ~G  HK  MT  M]  M]  H]  ~^ J  en  or  t{  tD  tD  oD  eE J  KT  UX  ZA  UA  KB J  IR  SV  X  S  I@ J  LU  VY  [B  VB  LC J  NW  X[  ]D  XD  NE J  MV  WZ  \C  WC  MD J  OX  Y\  ^L	  YL	  OM	 J  [	_	 JV &]Y/[%\]k]a]k]k  mT  ^U  &V "	#wO_O_J_@` fopsu|  vF  vF  qF  gG   MV  WZ  \c  \l  \l  Wl  Mm   t}  ~A  CJ  CS  CS  ~S  tT   Zc  dg  iP  dP  ZQ   Xa  be  gN  bN  XO   [d  eh  jQ  eQ  [R   ]f  gj  lS  gS  ]T   \e  fi  kR  fR  \S   ^g  hk  m[	  h[	  ^\	   k	o	 *  ''EGY[o  rE  GX  Zk  m{  }L  N]  _n  p~  @P  Rb  ds  uF  HX  Zj  l|  ~N  P_   ` &-.SUdfu&v #FIc73C3C.C$D FYWZ\c\l\lWlMm F  xA  BE  GN  GW  GW  BW  xX F  `i  jm  oK  jK  `L F  W`  ad  fI  aI  WJ F  PY  Z]  _{  Z{  P| F  BK  LO  Qm  Lm  Bn F  t}  ~A  C_  ~_  t` F  lu  vy  {W  vW  lX F  fj F  HL FN G)$EF~t~~VpGqr 9S'BRBR=R3S \efikrk{k{f{\|   GP  QT  V]  Vf  Vf  Qf  Gg   ox  y|  ~Z  yZ  o[   fo  ps  uX  pX  fY   _h  il  nJ  iJ  _K   QZ  [^  `|  [|  Q}   CL  MP  Rn  Mn  Co   {D  EH  Jf  Ef  {g   vz   SW . "))IK]_s  vI  K\  ^o  q  AP  Ra  cr  tB  DT  Vf  hw  yJ  L\  ^n  p@  BR  Tc  "d (/0WYhjy(z %Jyg6F6F1F'G JPYZ]_f_o_oZoPp J  {D  EH  JQ  JZ  JZ  EZ  {[ J  cl  mp  rP  mP  cQ J  \e  fi  kP  fP  \Q J  W`  ad  fD  aD  WE J  KT  UX  Zx  Ux  Ky J  H  IL  Nl  Il  m J  }F  GJ  Lj  Gj  }k J  w@  AD  Fd  Ad  we J  sw J  U	Y	 JV KI&IJ>4>>ZwKxy iWEUEU@U6V _hilnun~n~i~_   JS  TW  Y`  Yi  Yi  Ti  Jj   r{  |  A_  |_  r`   kt  ux  z_  u_  k`   fo  ps  uS  pS  fT   Zc  dg  iG  dG  ZH   NW  X[  ]{  X{  N|   LU  VY  [y  Vy  Lz   FO  PS  Us  Ps  Ft   C	G	   `	d	 . )00WYk  nB  DW  Yj  l}  M  O^  `o  q@  BP  Rb  dt  vE  GX  Zj  l|  ~N  P`  bq  )r %/67egv  yH  0I ,BiW=M=M8M.N BW`adfmfvfvavWw B  ~G  HK  Mr  Hr  ~s B  {D  EH  Jo  Eo  {p B  {D  EH  Jv  Ev  {w B  CL  MP  Rw  Mw  Cx B  FJ B  _c BF $Y9-W#XYgY]YgYg  iM  ZN  $O  YsGL\L\G\=] fopsu|  vF  vF  qF  gG   MV  WZ  \A  WA  MB   JS  TW  Y~  T~  J   JS  TW  YE  TE  JF   R[  \_  aF  \F  RG   VZ   jn , $++MOacw  zM  O`  bs  uC  ET  Ve  gv  xF  HX  Zj  l{  }N  P`  br  tD  FV  Xg  $h  *12[]ln}*~ '@	#w7G7G2G(H @QZ[^`g`p`p[pQq @  xA  BE  Gg  Bg  xh @  py  z}  _  z_  p` @  kt  ux  za  ua  kb @  nw  x{  }]  x]  n^ @  lp @  EI @B Oi(MN~t~~^|O}~ ygFVFVAV7W `ijmovooj  aA   GP  QT  Vv  Qv  Gw   H  IL  Nn  In  o   zC  DG  Ip  Dp  zq   }F  GJ  Ll  Gl  }m   |@   PT , +22[]o  rF  H[  ]n  pA  CQ  Sb  ds  uD  FT  Vf  hx  zI  K\  ^n  p@  BR  Td  fu  +v '189ikz  }L  2M .9	#w?O?O:O0P 9YbcfhohxhxcxYy 9  @I  JM  Ov  Jv  @w 9  }F  GJ  Ls  Gs  }t 9  zC  DG  Ip  Dp  zq 9  yB  CF  Ho  Co  yp 9  {D  EH  Jx  Ex  {y 9  GK 9t &]Y/[%\]k]a]k]k  mS  ^T  &U "ygN^N^I^?_ hqruw~  xH  xH  sH  iI   OX  Y\  ^E  YE  OF   LU  VY  [B  VB  LC   IR  SV  X  S  I@   HQ  RU  W~  R~  H   JS  TW  YG  TG  JH   W[ & 3::km  BV  Xk  m~  @Q  Sa  cr  tC  ET  Vd  fv  xH  JY  [l  n~  @P  Rb  dt  vE  3F /9@Ay  |K  M\  :] 663HXHXCX9Y 6bklo  ra  ma  cb 6  hq  ru  wf  rf  hg 6  mv  wz  |k  wk  ml 6  t}  ~A  Cr  ~r  ts 6  ~G  HK  MC  HC  ~D 6  RV 6n .mY7k-lm{mqm{m{  }l  nm  .n *	RUW^WgWgRgHh qz{~  Ap  |p  rq   w@  AD  Fu  Au  wv   |E  FI  Kz  Fz  |{   CL  MP  RA  MA  CB   MV  WZ  \R  WR  MS   bf $ #**KM_au  xK  M^  `q  sA  CR  Tc  et  vD  FV  Xh  jy  {L  N^  `p  rB  DT  Ve  #f )01Y[jl{)| &@	#w7G7G2G(H @iX[]d]m]mXmNn @u~  @C  EL  EU  EU  @U  vV @  \e  fi  kJ  fJ  \K @  R[  \_  a@  \@  RA @  MV  WZ  \{  W{  M| @  GP  QT  Vu  Qu  Gv @  ~G  HK  Ml  Hl  ~m @  xA  BE  Gm  Bm  xn @  |@ @B MY'KL^T^^\zM{| ygFVFVAV7W ]fgjlsl|l|g|]}   EN  OR  T[  Td  Td  Od  Ee   kt  ux  zY  uY  kZ   aj  kn  pO  kO  aP   \e  fi  kJ  fJ  \K   V_  `c  eD  `D  VE   MV  WZ  \{  W{  M|   GP  QT  V|  Q|  G}   LP ( ##=?QSgi|  P  Rc  es  uD  FU  Wf  hv  xH  JZ  \k  m~  @P  Rb  dt  vF  HW  X ")*K_^m"n 0y.F)FG 0PYZ]_wZwPx 0  IR  SV  Xp  Sp  Iq 0  w@  AD  F^  A^  w_ 0  fo  ps  uM  pM  fN 0  Yb  cf  h@  c@  YA 0  IR  SV  Xp  Sp  Iq 0  |E  FI  Kj  Fj  |k 0  y} 0  S\  ]`  bz  ]z  S{ 0d ?i =>~t~~Nc?de gi=U8U.V g_hil  oG  jG  `H g  Xa  be  g  b  X@ g  FO  PS  Um  Pm  Fn g  u~  B  D\  \  u] g  hq  ru  wO  rO  hP g  Xa  be  g  b  X@ g  KT  UX  Zy  Uy  Kz g  IM g  ]f  gj  lD  gD  ]E g "";=OQegz  }N  Pa  cq  sB  DS  Ud  ft  vF  HX  Zi  k|  ~N  P`  br  tD  FU  V !()I?\k!l 4i-D(DE 4iX[]tXtNu 4  |E  FI  Kb  Fb  |c 4  kt  ux  zQ  uQ  kR 4  ]f  gj  lJ  gJ  ]K 4  Y] 4  rv 4  KT  UX  Zq  Uq  Kr 4l =Y;<^T^^L`=ab fYs<S7S-T f]fgj  mD  hD  ^E f  KT  UX  Zq  Uq  Kr f  zC  DG  I`  D`  za f  lu  vy  {Y  vY  lZ f  im f  }A f  QZ  [^  `w  [w  Qx f $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z #*+M`o#p  530I+I!J 5bklo  rK  mK  cL 5  lu  vy  {T  vT  lU 5  ]f  gj  lE  gE  ]F 5  QZ  [^  `@  [@  QA 5  GP  QT  Vo  Qo  Gp 5  v  @C  E^  @^  v_ 5  en  or  tM  oM  eN 5  ]f  gj  ls  ly  ly  gy  ]z 5  HL 5  bk  lo  qJ	  lJ	  bK	 5n Ay!?@PgAhi i	#?X:X0Y iqz{~  AZ  |Z  r[ i  {D  EH  Jc  Ec  {d i  lu  vy  {T  vT  lU i  `i  jm  oO  jO  `P i  V_  `c  e~  `~  V i  EN  OR  Tm  Om  En i  t}  ~A  C\  ~\  t] i  lu  vy  {B  {H  {H  vH  lI i  X\ i  lu  vy  {T	  vT	  lU	 i $$?ASUik~  AR  Te  gu  wF  HW  Yh  jx  zJ  L\  ^m  o@  BR  Td  fv  xH  JY  Z #*+M`o#p  730I+I!J 7S\]`b{]{S| 7  CL  MP  Rk  Mk  Cl 7  w@  AD  F_  A_  w` 7  hq  ru  wP  rP  hQ 7  \e  fi  kK  fK  \L 7  Z^ 7  sw 7  LU  VY  [t  Vt  Lu 7r Ay!?@PgAhi i	#?X:X0Y ibklo  rK  mK  cL i  R[  \_  az  \z  R{ i  FO  PS  Un  Pn  Fo i  w@  AD  F_  A_  w` i  kt  ux  zZ  uZ  k[ i  jn i  ~B i  R[  \_  az  \z  R{ i #**KM_au  xK  M^  `q  sA  CR  Tc  et  vD  FV  Xh  jy  {L  N^  `p  rB  DT  Ve  #f )01Y[jl{)| &G	#7V2V(W G]fgj  mL  hL  ^M G  S\  ]`  bA  ]A  SB G  IR  SV  Xw  Sw  Ix G  ~G  HK  Ml  Hl  ~m G  t}  ~A  Cb  ~b  tc G  ox  y|  ~]  y]  o^ G  ir  sv  xW  sW  iX G  `i  jm  oN  jN  `O G  Zc  dg  iO	  dO	  ZP	 G  ^	b	 G  x	A
  B
E
  G
f
  B
f
  x	g
 GR MY'KL^T^^\zM{| pyFeAe7f pluvy  |[  w[  m\ p  bk  lo  qP  lP  bQ p  Xa  be  gF  bF  XG p  MV  WZ  \{  W{  M| p  CL  MP  Rq  Mq  Cr p  ~G  HK  Ml  Hl  ~m p  xA  BE  Gf  Bf  xg p  ox  y|  ~]  y]  o^ p  ir  sv  x^	  s^	  i_	 p  n	r	 p  B
K
  L
O
  Q
p
  L
p
  B
q
 p 57IK_at  wH  J[  ]k  m|  ~M  O^  `n  p@  BR  Tc  ev  xH  JZ  \l  n~  @O  P %&C_Vef A9S*>%>? A	RUWkRkHl Av  AD  FZ  AZ  w[ A  cl  mp  rF  mF  cG A  R[  \_  a|  \|  R} A  CL  MP  Rf  Mf  Cg A  mv  wz  |P  wP  mQ A  W`  ad  fz  az  W{ A  GP  QT  Vj  Qj  Gk A  y} A  [_ A  u~  B  DX  X  uY AF 7)56~t~~FW7XYc)C9M4M*N cW`adfzazW{ c  FO  PS  Ui  Pi  Fj c  r{  |  AU  |U  rV c  aj  kn  pK  kK  aL c  R[  \_  au  \u  Rv c  |E  FI  K_  F_  |` c  fo  ps  uI  pI  fJ c  V_  `c  ey  `y  Vz c  IM c  fj c  zC  DG  I]  D]  z^ c !!9;MOcex  {L  N_  ao  q@  BQ  Sb  dr  tD  FV  Xg  iz  |L  N^  `p  rB  DS  T  '(GZi j Ei-C(CD EYWZ\rWrMs E  ~G  HK  Mc  Hc  ~d E  lu  vy  {Q  vQ  lR E  ]f  gj  lI  gI  ]J E  PY  Z]  _u  Zu  Pv E  |E  FI  Ka  Fa  |b E  hq  ru  wM  rM  hN E  ^g  hk  mC  hC  ^D E  PY  Z]  _u  Zu  Pv E  DH E  fj E  @	I	  J	M	  O	e	  J	e	  @	f	 EN ;I9:>4>>J^;_` fYs<R7R-S f\efi  lB  gB  ]C f  MV  WZ  \r  Wr  Ms f  {D  EH  J`  E`  {a f  lu  vy  {X  vX  lY f  _h  il  nD  iD  _E f  KT  UX  Zp  Up  Kq f  w@  AD  F\  A\  w] f  mv  wz  |R  wR  mS f  _h  il  nD  iD  _E f  TX f  qu f  E	N	  O	R	  T	j	  O	j	  E	k	 f ##=?QSgi|  P  Rc  es  uD  FU  Wf  hv  xH  JZ  \k  m~  @P  Rb  dt  vF  HW  X ")*K_^m"n =y.F)FG =PYZ]_wZwPx =  H  IL  Nf  If  g =  ox  y|  ~V  yV  oW =  bk  lo  qP  lP  bQ =  ]f  gj  lD  gD  ]E =  SW =  lp =  FO  PS  Um  Pm  Fn =~ ?i =>~t~~Nc?de gi=U8U.V g_hil  oG  jG  `H g  NW  X[  ]u  Xu  Nv g  ~G  HK  Me  He  ~f g  qz  {~  @_  {_  q` g  lu  vy  {S  vS  lT g  cg g  w{ g  KT  UX  Zr  Ur  Ks g #**KM_au  xK  M^  `q  sA  CR  Tc  et  vD  FV  Xh  jy  {L  N^  `p  rB  DT  Ve  #f )01Y[jl{)| &4y6U1U'V 4_hil  oN  jN  `O 4  U^  _b  dC  _C  UD 4  JS  TW  Yx  Tx  Jy 4  H  IL  Nm  Im  n 4  v  @C  Ed  @d  ve 4  py  z}  e  ze  pf 4  tx 4  NW  X[  ]|  X|  N} 4l MY'KL^T^^\yMz{ oiEd@d6e onwx{  ~]  y]  o^ o  dm  nq  sR  nR  dS o  Yb  cf  hG  cG  YH o  NW  X[  ]|  X|  N} o  EN  OR  Ts  Os  Et o  H  IL  Nt  It  u o  DH o  Xa  be  gF  bF  XG o +22[]o  rF  H[  ]n  pA  CQ  Sb  ds  uD  FT  Vf  hx  zI  K\  ^n  p@  BR  Td  fu  +v '189ikz  }L  2M .2	#?f:f0g 2pyz}  @g  {g  qh 2  nw  x{  }d  xd  ne 2  kt  ux  za  ua  kb 2  js  tw  y`  t`  ja 2  lu  vy  {i  vi  lj 2  x| 2  R[  \_  aH  \H  RI 2h &]Y/[%\]k]a]k]k  mS  ^T  &U "xyNuIu?v x  @I  JM  Ov  Jv  @w x  }F  GJ  Ls  Gs  }t x  zC  DG  Ip  Dp  zq x  yB  CF  Ho  Co  yp x  {D  EH  Jx  Ex  {y x  HL x  \e  fi  kR  fR  \S x "";=OQegz  }N  Pa  cq  sB  DS  Ud  ft  vF  HX  Zi  k|  ~N  P`  br  tD  FU  V !()I?\k!l >y.E)EF >IVY[rVrLs >  {D  EH  Ja  Ea  {b >  hq  ru  wN  rN  hO >  V_  `c  e|  `|  V} >  IR  SV  Xo  So  Ip >  {D  EH  Ja  Ea  {b >  js  tw  yP  tP  jQ >  \e  fi  kI  fI  \J >  X\ >  r{  |  AX  |X  rY >@ =Y;<^T^^La=bc gi=T8T.U g[deh  kB  fB  \C g  JS  TW  Yp  Tp  Jq g  w@  AD  F]  A]  w^ g  en  or  tK  oK  eL g  Xa  be  g~  b~  X g  JS  TW  Yp  Tp  Jq g  yB  CF  H_  C_  y` g  kt  ux  zX  uX  kY g  hl g  |E  FI  Kb  Fb  |c gr=   