C++ API Reference for Intel® Data Analytics Acceleration Library 2020 Update 1

neural_networks_training_model.h
1 /* file: neural_networks_training_model.h */
2 /*******************************************************************************
3 * Copyright 2014-2020 Intel Corporation
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 *******************************************************************************/
17 
18 /*
19 //++
20 // Implementation of neural network.
21 //--
22 */
23 
24 #ifndef __NEURAL_NETWORK_TRAINING_MODEL_H__
25 #define __NEURAL_NETWORK_TRAINING_MODEL_H__
26 
27 #include "services/daal_defines.h"
28 #include "data_management/data/tensor.h"
29 #include "data_management/data/numeric_table.h"
30 #include "services/daal_memory.h"
31 #include "algorithms/neural_networks/layers/layer.h"
32 #include "algorithms/neural_networks/layers/layer_types.h"
33 #include "algorithms/neural_networks/layers/loss/loss_layer_forward.h"
34 #include "algorithms/neural_networks/layers/split/split_layer_forward.h"
35 #include "algorithms/neural_networks/neural_networks_prediction_model.h"
36 #include "algorithms/neural_networks/neural_networks_training_topology.h"
37 
38 #include "algorithms/optimization_solver/iterative_solver/iterative_solver_batch.h"
39 
40 namespace daal
41 {
42 namespace algorithms
43 {
47 namespace neural_networks
48 {
49 namespace training
50 {
51 namespace interface1
52 {
62 class Parameter : public daal::algorithms::Parameter
63 {
64 public:
71  Parameter(const services::SharedPtr<optimization_solver::iterative_solver::Batch > &optimizationSolver_ = services::SharedPtr<optimization_solver::iterative_solver::Batch>(),
72  engines::EnginePtr engine_ = engines::mt19937::Batch<DAAL_ALGORITHM_FP_TYPE>::create()) :
73  optimizationSolver(optimizationSolver_),
74  engine(engine_) {}
75 
76  services::SharedPtr<optimization_solver::iterative_solver::Batch> optimizationSolver;
77  engines::EnginePtr engine;
78 };
79 
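The Parameter above pairs an iterative optimization solver with a random-number engine. A minimal construction sketch follows (illustrative only, not part of the header): it assumes the SGD batch solver declared under algorithms/optimization_solver/sgd/, but any other descendant of optimization_solver::iterative_solver::Batch could be supplied, and the engine argument falls back to a default mt19937 engine when omitted.

#include "daal.h"  /* umbrella header; brings in the solver, engine, and training model declarations */

using namespace daal;
using namespace daal::algorithms;

void configureTrainingParameter()
{
    /* SGD is used here only as an example of an iterative_solver::Batch descendant (assumption) */
    services::SharedPtr<optimization_solver::sgd::Batch<float> > sgdSolver(
        new optimization_solver::sgd::Batch<float>());

    /* The engine defaults to engines::mt19937::Batch<DAAL_ALGORITHM_FP_TYPE>::create() */
    neural_networks::training::Parameter trainingParameter(sgdSolver);
    /* trainingParameter can now be passed to Model::initialize() or Model::allocate() */
}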
85 class DAAL_EXPORT Model : public neural_networks::ModelImpl
86 {
87 public:
88  DECLARE_SERIALIZABLE_CAST(Model);
89 
90  using neural_networks::ModelImpl::getWeightsAndBiases;
91  using neural_networks::ModelImpl::setWeightsAndBiases;
92 
94  /*
95  * \DAAL_DEPRECATED
96  */
97  DAAL_DEPRECATED Model();
98 
99  /*
100  * \DAAL_DEPRECATED
101  */
102  DAAL_DEPRECATED static services::SharedPtr<Model> create(services::Status *stat = NULL);
103 
105  /*
106  * \DAAL_DEPRECATED
107  */
108  DAAL_DEPRECATED Model(const Model &model) :
109  ModelImpl(model),
110  _backwardLayers(model.getBackwardLayers()),
111  _storeWeightDerivativesInTable(model._storeWeightDerivativesInTable)
112  {}
113 
115  virtual ~Model() {}
116 
126  template<typename modelFPType>
127  services::Status initialize(const services::Collection<size_t> &sampleSize, const Topology &topology,
128  const Parameter &parameter = Parameter())
129  {
130  using namespace layers;
131  using namespace services;
132 
133  size_t nLayers = topology.size();
134  Status st;
135  _backwardNextLayers = SharedPtr<Collection<NextLayers> >(new Collection<NextLayers>(nLayers));
136  if (!_backwardNextLayers)
137  {
138  st.add(services::ErrorMemoryAllocationFailed);
139  return st;
140  }
141 
142  for(size_t i = 0; i < nLayers; i++)
143  {
144  insertLayer(topology[i]);
145  }
146 
147  for(int i = (int)nLayers - 1; i >= 0; i--)
148  {
149  size_t layerId = topology[i].index();
150  const NextLayers &next = topology[i].nextLayers();
151  for (size_t j = 0; j < next.size(); j++)
152  {
153  (*_backwardNextLayers)[next[j]].push_back(layerId);
154  }
155  }
156 
157  for(int i = (int)nLayers - 1; i >= 0; i--)
158  {
159  layers::forward::LayerIfacePtr layer = getForwardLayer(i);
160  SharedPtr<split::forward::Batch<float> > splitLayerFloat = dynamicPointerCast<split::forward::Batch<float>, forward::LayerIface>(layer);
161  SharedPtr<split::forward::Batch<double> > splitLayerDouble = dynamicPointerCast<split::forward::Batch<double>, forward::LayerIface>(layer);
162  if(splitLayerFloat.get() || splitLayerDouble.get())
163  {
164  const NextLayers &next = topology[i].nextLayers();
165  for (size_t j = 0; j < next.size(); j++)
166  {
167  layers::forward::LayerIfacePtr nextLayer = getForwardLayer(next[j]);
168  nextLayer->getLayerParameter()->allowInplaceComputation = false;
169  }
170  }
171  }
172 
 173  st |= allocate<modelFPType>(sampleSize, parameter);
174 
175  for(size_t i = 0; i < nLayers; i++)
176  {
177  getForwardLayer(i)->enableResetOnCompute(false);
178  getBackwardLayer(i)->enableResetOnCompute(false);
179  }
180  return st;
181  }
182 
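A sketch of how initialize() is typically driven (illustrative only, not part of the header): the Topology is assumed to be populated elsewhere with the forward/backward layer pairs of the network, ending in a loss layer, and the batch dimensions below are placeholders.

#include "daal.h"

using namespace daal;
using namespace daal::algorithms;
using namespace daal::algorithms::neural_networks;

training::ModelPtr buildTrainingModel(const training::Topology &topology,
                                      const training::Parameter &trainingParameter)
{
    /* Placeholder dimensions of one batch of input data (hypothetical values) */
    services::Collection<size_t> sampleSize;
    sampleSize.push_back(10);   /* batch size        */
    sampleSize.push_back(20);   /* features per row  */

    services::Status st;
    training::ModelPtr model = training::Model::create(&st);
    if (!st || !model) { return training::ModelPtr(); }

    /* Allocates layer results and weight/bias storage for single-precision training */
    st |= model->initialize<float>(sampleSize, topology, trainingParameter);
    if (!st) { return training::ModelPtr(); }
    return model;
}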
188  DAAL_DEPRECATED const ForwardLayersPtr getForwardLayers() const
189  {
190  return _forwardLayers;
191  }
192 
199  const layers::forward::LayerIfacePtr getForwardLayer(size_t index) const
200  {
201  return _forwardLayers->get(index);
202  }
203 
209  const BackwardLayersPtr getBackwardLayers() const
210  {
211  return _backwardLayers;
212  }
213 
220  const layers::backward::LayerIfacePtr getBackwardLayer(size_t index) const
221  {
222  return _backwardLayers->get(index);
223  }
224 
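The per-layer accessors above make it possible to walk the forward/backward pipeline of an initialized model. The sketch below (illustrative, not part of the header) reuses only calls that appear in this file; the flags it touches are the same ones that initialize() and allocate() manage internally.

#include "daal.h"

using namespace daal;
using namespace daal::algorithms;
using namespace daal::algorithms::neural_networks;

void inspectLayers(const training::ModelPtr &model)
{
    const size_t nLayers = model->getBackwardLayers()->size();
    for (size_t i = 0; i < nLayers; i++)
    {
        layers::forward::LayerIfacePtr  fwd = model->getForwardLayer(i);
        layers::backward::LayerIfacePtr bwd = model->getBackwardLayer(i);

        /* Same flag that initialize() clears for successors of split layers */
        fwd->getLayerParameter()->allowInplaceComputation = false;

        /* Gradient propagation is managed by enableConditionalGradientPropagation() */
        const bool propagates = bwd->getLayerParameter()->propagateGradient;
        (void)propagates;  /* e.g. log or assert on the propagation flag here */
    }
}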
230  template<typename modelFPType>
231  const prediction::ModelPtr getPredictionModel()
232  {
233  using namespace services;
234  using namespace data_management;
235  using namespace layers;
236 
237  size_t nLayers = _forwardLayers->size();
238 
239  /* Copy forward layers */
240  ForwardLayersPtr _predictionForwardLayers(new ForwardLayers(nLayers));
241  SharedPtr<Collection<NextLayers> > _predictionNextLayers(new Collection<NextLayers>(nLayers));
242  for (size_t i = 0; i < nLayers; i++)
243  {
244  (*_predictionNextLayers)[i] = _nextLayers->get(i);
245  (*_predictionForwardLayers)[i] = ((*_forwardLayers)[i])->getLayerForPrediction();
246  (*_predictionForwardLayers)[i]->getLayerParameter()->predictionStage = true;
247  }
248 
249  bool storeWeightsInTable = true;
250  prediction::ModelPtr predictionModel(new prediction::Model(
251  _predictionForwardLayers, _predictionNextLayers, (modelFPType)0.0, storeWeightsInTable));
252 
253  predictionModel->setWeightsAndBiases(getWeightsAndBiases());
254  return predictionModel;
255  }
256 
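Once training has finished, getPredictionModel() above produces a prediction-stage copy of the network. A short sketch follows (illustrative, not part of the header); the float instantiation should match the precision used when the training model was initialized.

#include "daal.h"

using namespace daal;
using namespace daal::algorithms;
using namespace daal::algorithms::neural_networks;

prediction::ModelPtr toPredictionModel(const training::ModelPtr &trainedModel)
{
    /* Copies the forward layers, switches them to the prediction stage,
       and shares the trained weights and biases with the new model */
    return trainedModel->getPredictionModel<float>();
}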
263  DAAL_DEPRECATED bool getWeightsAndBiasesStorageStatus() const
264  {
265  return _storeWeightsInTable;
266  }
267 
276  DAAL_DEPRECATED services::Status setWeightsAndBiases(size_t idx, const data_management::NumericTablePtr &table);
277 
284  DAAL_DEPRECATED data_management::NumericTablePtr getWeightsAndBiases(size_t idx) const;
285 
291  DAAL_DEPRECATED data_management::NumericTablePtr getWeightsAndBiasesDerivatives() const;
292 
299  DAAL_DEPRECATED data_management::NumericTablePtr getWeightsAndBiasesDerivatives(size_t idx) const;
300 
309  DAAL_DEPRECATED services::Status setErrors(services::ErrorCollection &errors)
310  {
311  return services::Status();
312  }
313 
320  DAAL_DEPRECATED const services::ErrorCollection &getErrors() const { return _errors; }
321 
330  template<typename modelFPType>
331  services::Status allocate(const services::Collection<size_t> &sampleSize, const Parameter &parameter = Parameter())
332  {
333  using namespace services;
334  using namespace data_management;
335  using namespace layers;
336 
337  services::Status s;
338 
339  if (_sampleSize.size() > 0) { _sampleSize.clear(); }
340  _sampleSize = sampleSize;
341 
342  _forwardLayers->get(0)->getLayerInput()->set(forward::data,
343  TensorPtr(new HomogenTensor<modelFPType>(_sampleSize, Tensor::doAllocate)));
344 
345  size_t nLayers = _forwardLayers->size();
346 
347  for (size_t i = 0; i < nLayers; i++)
348  {
349  layers::Parameter *lParameter = _forwardLayers->get(i)->getLayerParameter();
350  initializers::Parameter *wParameter = lParameter->weightsInitializer->getParameter();
351  initializers::Parameter *bParameter = lParameter->biasesInitializer->getParameter();
352 
353  s |= connectForwardLayers(i);
354 
355  if(!wParameter->engine)
356  {
357  wParameter->engine = parameter.engine;
358  }
359  if(!bParameter->engine)
360  {
361  bParameter->engine = parameter.engine;
362  }
363  }
364 
365  bool checkWeightsAndBiasesAlloc = true;
366  s |= createWeightsAndBiases<modelFPType>(checkWeightsAndBiasesAlloc);
367  s |= enableConditionalGradientPropagation();
368  if(!s) return s;
369 
370  for (size_t i = 0; i < nLayers; i++)
371  {
372  forward::LayerIfacePtr forwardLayer = _forwardLayers->get(i);
373  forward::Input *forwardInput = forwardLayer->getLayerInput();
374 
375  forwardLayer->getLayerResult()->setResultForBackward(forwardInput);
376  }
377 
378  /* Check weights and biases derivatives allocation status before allocating the results of backward layers */
379  s |= checkWeightsAndBiasesDerivativesAllocation();
380 
381  for (int i = (int)nLayers - 1; i >= 0; i--)
382  {
383  s |= connectBackwardLayers(i);
384  }
385 
386  s |= createWeightsAndBiasesDerivatives<modelFPType>();
387  if(_solverOptionalArgumentCollection.size() == 0)
388  {
389  if(_storeWeightsInTable) _solverOptionalArgumentCollection = DataCollection(1);
390  else _solverOptionalArgumentCollection = DataCollection(nLayers);
391  }
392  return s;
393  }
394 
395 protected:
397  /*
398  * \DAAL_DEPRECATED
399  */
400  DAAL_DEPRECATED Model(services::Status &st);
401 
403  /*
404  * \DAAL_DEPRECATED
405  */
406  template<typename Archive, bool onDeserialize>
407  services::Status serialImpl(Archive *arch)
408  {
409  return services::Status();
410  }
411 
412  /*
413  * \DAAL_DEPRECATED
414  */
415  void insertLayer(const layers::LayerDescriptor &layerDescriptor)
416  {
417  _forwardLayers->insert(layerDescriptor.index(), layerDescriptor.layer()->forwardLayer->clone());
418  _backwardLayers->insert(layerDescriptor.index(), layerDescriptor.layer()->backwardLayer->clone());
419  _nextLayers->insert(layerDescriptor.index(), layerDescriptor.nextLayers());
420  }
421 
422  /*
423  * \DAAL_DEPRECATED
424  */
425  services::Status enableConditionalGradientPropagation()
426  {
427  using namespace services;
428  using namespace layers;
429 
430  services::Status s;
431 
432  size_t nLayers = _forwardLayers->size();
433 
434  /* Array of flags for the neural network layers */
435  bool *flags = (bool *)daal_malloc(nLayers * sizeof(bool));
436 
437  /* Perform depth search to disable gradient propagation in starting forward layers with weights
438  and all the previous layers */
439  s |= disableGradientPropagationInStartingLayers(nLayers, flags);
440 
441  /* Perform depth search to enable gradient propagation in the layers
442  that follow forward layers with weights */
443  s |= enableGradientPropagation(nLayers, flags);
444 
445  daal_free(flags);
446  return s;
447  }
448 
449  /*
450  * \DAAL_DEPRECATED
451  */
452  services::Status disableGradientPropagationInStartingLayers(size_t nLayers, bool *visited)
453  {
454  using namespace services;
455  using namespace layers;
456 
457  for (size_t i = 0; i < nLayers; i++)
458  {
459  visited[i] = false;
460  }
461 
462  Collection<size_t> stack;
463  stack.push_back(0 /* ID of the first forward layer */);
464  while (stack.size() > 0)
465  {
466  size_t layerId = stack[stack.size() - 1];
467  stack.erase(stack.size() - 1);
468  if (!visited[layerId])
469  {
470  visited[layerId] = true;
471 
472  forward::LayerIfacePtr forwardLayer = _forwardLayers->get(layerId);
473  forward::Input *forwardInput = forwardLayer->getLayerInput();
474  layers::Parameter *forwardParameter = forwardLayer->getLayerParameter();
475  layers::Parameter *backwardParameter = _backwardLayers->get(layerId)->getLayerParameter();
476 
477  backwardParameter->propagateGradient = false;
478 
479  if (forwardInput->getWeightsSizes(forwardParameter).size() +
480  forwardInput->getBiasesSizes(forwardParameter) .size() == 0)
481  {
482  /* Continue depth search for layers that do not have weights and biases */
483  const NextLayers &next = _nextLayers->get(layerId);
484  for (size_t i = 0; i < next.size(); i++)
485  {
486  stack.push_back(next[i]);
487  }
488  }
489  }
490  }
491  return services::Status();
492  }
493 
494  /*
495  * \DAAL_DEPRECATED
496  */
497  services::Status enableGradientPropagationInSubsequentLayers(size_t startLayerId, size_t nLayers, bool *enabledPropagation)
498  {
499  using namespace services;
500  using namespace layers;
501  Collection<size_t> stack;
502  const NextLayers &next = _nextLayers->get(startLayerId);
503  for (size_t i = 0; i < next.size(); i++)
504  {
505  stack.push_back(next[i]);
506  }
507  while (stack.size() > 0)
508  {
509  size_t layerId = stack[stack.size() - 1];
510  stack.erase(stack.size() - 1);
511  if (!enabledPropagation[layerId])
512  {
513  enabledPropagation[layerId] = true;
514  backward::LayerIfacePtr backwardLayer = _backwardLayers->get(layerId);
515  backwardLayer->getLayerParameter()->propagateGradient = true;
516  const NextLayers &next = _nextLayers->get(layerId);
517  for (size_t i = 0; i < next.size(); i++)
518  {
519  stack.push_back(next[i]);
520  }
521  }
522  }
523  return services::Status();
524  }
525 
526  /*
527  * \DAAL_DEPRECATED
528  */
529  services::Status enableGradientPropagation(size_t nLayers, bool *enabledPropagation)
530  {
531  using namespace services;
532  using namespace layers;
533  Collection<size_t> stack;
534  stack.push_back(0 /* ID of the first forward layer */);
535 
536  for (size_t i = 0; i < nLayers; i++)
537  {
538  enabledPropagation[i] = false;
539  }
540 
541  while (stack.size() > 0)
542  {
543  size_t layerId = stack[stack.size() - 1];
544  stack.erase(stack.size() - 1);
545  if (!enabledPropagation[layerId])
546  {
547  forward::LayerIfacePtr forwardLayer = _forwardLayers->get(layerId);
548  forward::Input *forwardInput = forwardLayer->getLayerInput();
549  layers::Parameter *forwardParameter = forwardLayer->getLayerParameter();
550  layers::Parameter *backwardParameter = _backwardLayers->get(layerId)->getLayerParameter();
551 
552  if (backwardParameter->propagateGradient == false &&
553  (forwardInput->getWeightsSizes(forwardParameter).size() +
554  forwardInput->getBiasesSizes(forwardParameter) .size()) > 0)
555  {
556  enableGradientPropagationInSubsequentLayers(layerId, nLayers, enabledPropagation);
557  }
558  else
559  {
560  const NextLayers &next = _nextLayers->get(layerId);
561  for (size_t i = 0; i < next.size(); i++)
562  {
563  stack.push_back(next[i]);
564  }
565  }
566  }
567  }
568  return services::Status();
569  }
570 
571  /*
572  * \DAAL_DEPRECATED
573  */
574  services::Status checkWeightsAndBiasesDerivativesAllocation()
575  {
576  using namespace services;
577  using namespace layers;
578 
579  _storeWeightDerivativesInTable = true;
580  size_t nLayers = _backwardLayers->size();
581  for (size_t i = 0; i < nLayers; i++)
582  {
583  backward::LayerIfacePtr &backwardLayer = _backwardLayers->get(i);
584  if (!backwardLayer) { continue; }
585  backward::ResultPtr backwardResult = backwardLayer->getLayerResult();
586  /* Check if weight and bias derivatives are allocated by user */
587  if (backwardResult->get(backward::weightDerivatives) || backwardResult->get(backward::biasDerivatives))
588  {
589  _storeWeightDerivativesInTable = false;
590  break;
591  }
592  }
593  return services::Status();
594  }
595 
596  /*
597  * \DAAL_DEPRECATED
598  */
599  services::Status connectBackwardLayers(size_t layerId)
600  {
601  using namespace services;
602  using namespace data_management;
603  using namespace layers;
604 
605  forward::LayerIfacePtr &forwardLayer = _forwardLayers->get(layerId);
606  backward::LayerIfacePtr &backwardLayer = _backwardLayers->get(layerId);
607 
608  if (!forwardLayer || !backwardLayer) { return services::Status(); }
609 
610  backward::Input *backwardInput = backwardLayer->getLayerInput();
611  forward::ResultPtr forwardResult = forwardLayer->getLayerResult();
612 
613  backwardInput->setInputFromForward(forwardResult);
614  backwardLayer->allocateResult();
615 
616  /* Don't connect backward layer to next backward layers
617  if the layer does not propagate gradient */
618  if (!backwardLayer->getLayerParameter()->propagateGradient) { return services::Status(); }
619 
620  backward::ResultPtr backwardResult = backwardLayer->getLayerResult();
621 
622  const NextLayers &next = _backwardNextLayers->get(layerId);
623  const size_t nextLayersSize = next.size();
624  for(size_t j = 0; j < nextLayersSize; j++)
625  {
626  size_t inputIndex = nextLayersSize - j - 1;
627  _backwardLayers->get(next[j])->addInput(backwardResult, inputIndex, 0 /* index in input object of next[j] backward layer */);
628  }
629  return services::Status();
630  }
631 
632  /*
633  * \DAAL_DEPRECATED
634  */
635  template<typename modelFPType>
636  DAAL_EXPORT services::Status createWeightsAndBiasesDerivatives();
637 
638 public:
645  DAAL_DEPRECATED algorithms::OptionalArgumentPtr getSolverOptionalArgument(size_t index)
646  {
647  return services::dynamicPointerCast<algorithms::OptionalArgument, data_management::SerializationIface>(_solverOptionalArgumentCollection[index]);
648  }
649 
658  DAAL_DEPRECATED services::Status setSolverOptionalArgument(const algorithms::OptionalArgumentPtr& solverOptionalArgument, size_t index)
659  {
660  _solverOptionalArgumentCollection[index] = solverOptionalArgument;
661  return services::Status();
662  }
663 
669  DAAL_DEPRECATED data_management::DataCollection getSolverOptionalArgumentCollection()
670  {
671  return _solverOptionalArgumentCollection;
672  }
673 
681  DAAL_DEPRECATED services::Status setSolverOptionalArgumentCollection(const data_management::DataCollection &solverOptionalArgumentCollection)
682  {
683  _solverOptionalArgumentCollection = solverOptionalArgumentCollection;
684  return services::Status();
685  }
686 
687 private:
688  data_management::DataCollection _solverOptionalArgumentCollection;
689  services::Collection<size_t> _sampleSize;
690  BackwardLayersPtr _backwardLayers;
691  services::SharedPtr<services::Collection<layers::NextLayers> > _backwardNextLayers;
692  mutable services::ErrorCollection _errors;
694  bool _storeWeightDerivativesInTable;
695  LearnableParametersIfacePtr _weightsAndBiasesDerivatives;
696 };
697 
698 typedef services::SharedPtr<Model> ModelPtr;
701 } // namespace interface1
702 using interface1::Parameter;
703 using interface1::Model;
704 using interface1::ModelPtr;
705 
706 } // namespace training
707 } // namespace neural_networks
708 } // namespace algorithms
709 } // namespace daal
710 #endif