Liking cljdoc? Tell your friends :D

clj-nd4j.ml.gradient


->ada-delta-configclj

(->ada-delta-config {:keys [rho epsilon]
                     :or {rho AdaDelta/DEFAULT_ADADELTA_RHO
                          epsilon AdaDelta/DEFAULT_ADADELTA_EPSILON}})
source

->ada-delta-updaterclj

(->ada-delta-updater obj)
source

->ada-grad-configclj

(->ada-grad-config {:keys [learning-rate epsilon]
                    :or {learning-rate AdaGrad/DEFAULT_ADAGRAD_LEARNING_RATE
                         epsilon AdaGrad/DEFAULT_ADAGRAD_EPSILON}})
source

->ada-grad-updaterclj

(->ada-grad-updater obj)
source

->ada-max-configclj

(->ada-max-config {:keys [learning-rate beta1 beta2 epsilon]
                   :or {learning-rate AdaMax/DEFAULT_ADAMAX_LEARNING_RATE
                        beta1 AdaMax/DEFAULT_ADAMAX_BETA1_MEAN_DECAY
                        beta2 AdaMax/DEFAULT_ADAMAX_BETA2_VAR_DECAY
                        epsilon AdaMax/DEFAULT_ADAMAX_EPSILON}})
source

->ada-max-updaterclj

(->ada-max-updater obj)
source

->adam-configclj

(->adam-config {:keys [learning-rate beta1 beta2 epsilon]
                :or {learning-rate Adam/DEFAULT_ADAM_LEARNING_RATE
                     beta1 Adam/DEFAULT_ADAM_BETA1_MEAN_DECAY
                     beta2 Adam/DEFAULT_ADAM_BETA2_VAR_DECAY
                     epsilon Adam/DEFAULT_ADAM_EPSILON}})
source

->adam-updaterclj

(->adam-updater obj)
source

->ams-grad-configclj

(->ams-grad-config {:keys [learning-rate beta1 beta2 epsilon]
                    :or {learning-rate AMSGrad/DEFAULT_AMSGRAD_LEARNING_RATE
                         beta1 AMSGrad/DEFAULT_AMSGRAD_BETA1_MEAN_DECAY
                         beta2 AMSGrad/DEFAULT_AMSGRAD_BETA2_VAR_DECAY
                         epsilon AMSGrad/DEFAULT_AMSGRAD_EPSILON}})
source

->ams-grad-updaterclj

(->ams-grad-updater obj)
source

->gradient-updater-configclj

(->gradient-updater-config {:keys [type] :as options})
(->gradient-updater-config type options)

Builds a gradient updater configuration (IUpdater) from an updater kind and updater-specific arguments. It requires two arguments, which may also be nested together into a single map. Throws an exception if the updater does not exist. See documentation. Input :

  • kind : gradient updater kind as a keyword
  • options : gradient updater specific configuration. Usage : (->gradient-updater-config :ada-delta {:rho 1.256}) ~ (->gradient-updater-config {:kind :ada-delta, :options {:rho 1.256}})
Builds a gradient updater configuration (IUpdater)
from updater-kind and specific arguments.
It requires two arguments, which may also be
nested together into a single map.
Throws an exception if the updater does not exist.
See documentation
Input :
- kind : gradient updater kind as a keyword
- options : gradient updater specific configuration
Usage :
(->gradient-updater-config :ada-delta {:rho 1.256})
~
(->gradient-updater-config {:kind :ada-delta, :options {:rho 1.256}})
sourceraw docstring

->nadam-configclj

(->nadam-config {:keys [learning-rate beta1 beta2 epsilon]
                 :or {learning-rate Nadam/DEFAULT_NADAM_LEARNING_RATE
                      beta1 Nadam/DEFAULT_NADAM_BETA1_MEAN_DECAY
                      beta2 Nadam/DEFAULT_NADAM_BETA2_VAR_DECAY
                      epsilon Nadam/DEFAULT_NADAM_EPSILON}})
source

->nadam-updaterclj

(->nadam-updater obj)
source

->nesterovs-configclj

(->nesterovs-config {:keys [learning-rate momentum]
                     :or {learning-rate Nesterovs/DEFAULT_NESTEROV_LEARNING_RATE
                          momentum Nesterovs/DEFAULT_NESTEROV_MOMENTUM}})
source

->nesterovs-updaterclj

(->nesterovs-updater obj)
source

->rms-prop-configclj

(->rms-prop-config {:keys [learning-rate rms-decay epsilon]
                    :or {learning-rate RmsProp/DEFAULT_RMSPROP_LEARNING_RATE
                         rms-decay RmsProp/DEFAULT_RMSPROP_RMSDECAY
                         epsilon RmsProp/DEFAULT_RMSPROP_EPSILON}})
source

->rms-prop-updaterclj

(->rms-prop-updater obj)
source

->sgd-configclj

(->sgd-config {:keys [learning-rate] :or {learning-rate Sgd/DEFAULT_SGD_LR}})
source

->sgd-updaterclj

(->sgd-updater obj)
source

ada-delta-config?clj

(ada-delta-config? obj)
source

ada-delta-updaterclj

(ada-delta-updater config)
source

ada-grad-config?clj

(ada-grad-config? obj)
source

ada-grad-updaterclj

(ada-grad-updater config)
source

ada-max-config?clj

(ada-max-config? obj)
source

ada-max-updaterclj

(ada-max-updater config)
source

adam-config?clj

(adam-config? obj)
source

adam-updaterclj

(adam-updater config)
source

ams-grad-config?clj

(ams-grad-config? obj)
source

ams-grad-updaterclj

(ams-grad-updater config)
source

gradient-update-configsclj

source

nadam-config?clj

(nadam-config? obj)
source

nadam-updaterclj

(nadam-updater config)
source

nesterovs-config?clj

(nesterovs-config? obj)
source

nesterovs-updaterclj

(nesterovs-updater config)
source

rms-prop-config?clj

(rms-prop-config? obj)
source

rms-prop-updaterclj

(rms-prop-updater config)
source

sgd-config?clj

(sgd-config? obj)
source

sgd-updaterclj

(sgd-updater config)
source

cljdoc is a website building & hosting documentation for Clojure/Script libraries

× close