Class: CNTK::Trainer

Inherits:
Object
  • Object
show all
Defined in:
lib/cntk/trainer.rb,
ext/cntk/cntk_wrap.cxx

Class Method Summary collapse

Instance Method Summary collapse

Class Method Details

.create(model: nil, loss: nil, evaluation: nil, learners: nil) ⇒ Object



30
31
32
33
34
35
36
37
38
39
40
41
42
43
# File 'lib/cntk/trainer.rb', line 30

# Builds a CNTK Trainer from a model, a loss criterion, an optional
# evaluation criterion, and one or more parameter learners.
#
# @param model [Variable, Function] network output to train
# @param loss [Variable, Function] training loss criterion
# @param evaluation [Variable, Function, nil] optional evaluation criterion;
#   when omitted the trainer is created without one
# @param learners [Object, Array] parameter learner(s); a single learner is
#   wrapped in an Array automatically
# @return [Trainer] the trainer built by CNTK.__create_trainer__
# @raise [ArgumentError] if model, loss, or learners is nil
def create(model: nil, loss: nil, evaluation: nil, learners: nil)
  # `&&` (not `and`) for boolean logic to avoid precedence surprises.
  unless model && loss && learners
    raise ArgumentError, "model, loss function, and learners needed"
  end
  model      = variable_to_function(model)
  loss       = variable_to_function(loss)
  evaluation = variable_to_function(evaluation) if evaluation
  learners   = [learners] unless learners.is_a?(Array)
  if evaluation
    CNTK.__create_trainer__(model, loss, evaluation, learners)
  else
    CNTK.__create_trainer__(model, loss, learners)
  end
end

Instance Method Details

#__test_minibatchdata__(*args, self) ⇒ Object



58808
58809
58810
58811
58812
58813
58814
58815
58816
58817
58818
58819
58820
58821
58822
58823
58824
58825
58826
58827
58828
58829
58830
58831
58832
58833
58834
58835
58836
58837
58838
58839
58840
58841
58842
58843
58844
58845
58846
58847
58848
58849
58850
58851
58852
58853
58854
58855
# File 'ext/cntk/cntk_wrap.cxx', line 58808

// SWIG-generated overload dispatcher for Trainer#__test_minibatchdata__.
// Prepends the receiver to the argument vector, then probes each candidate
// signature in declaration order with non-destructive type checks and
// forwards to the first matching typed wrapper (__SWIG_0 / __SWIG_1);
// falls through to a Ruby overload error if nothing matches.
SWIGINTERN VALUE _wrap_Trainer___test_minibatchdata__(int nargs, VALUE *args, VALUE self) {
  int argc;
  VALUE argv[4];
  int ii;
  
  // argv[0] is the receiver, so the effective argument count is nargs + 1.
  argc = nargs + 1;
  argv[0] = self;
  if (argc > 4) SWIG_fail;
  for (ii = 1; (ii < argc); ++ii) {
    argv[ii] = args[ii-1];
  }
  // Candidate: __test_minibatchdata__(arguments) — minibatch-data map only.
  if (argc == 2) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      // Inner `res` intentionally shadows the outer one (generated code).
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        return _wrap_Trainer___test_minibatchdata____SWIG_1(nargs, args, self);
      }
    }
  }
  // Candidate: __test_minibatchdata__(arguments, computeDevice).
  if (argc == 3) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_CNTK__DeviceDescriptor, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          return _wrap_Trainer___test_minibatchdata____SWIG_0(nargs, args, self);
        }
      }
    }
  }
  
// No overload matched: raise a Ruby error listing the accepted signatures.
fail:
  Ruby_Format_OverloadedError( argc, 4, "Trainer.__test_minibatchdata__", 
    "    double Trainer.__test_minibatchdata__(std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > > const &arguments, CNTK::DeviceDescriptor const &computeDevice)\n"
    "    double Trainer.__test_minibatchdata__(std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > > const &arguments)\n");
  
  return Qnil;
}

#__train_minibatch__(*args, self) ⇒ Object



58568
58569
58570
58571
58572
58573
58574
58575
58576
58577
58578
58579
58580
58581
58582
58583
58584
58585
58586
58587
58588
58589
58590
58591
58592
58593
58594
58595
58596
58597
58598
58599
58600
58601
58602
58603
58604
58605
58606
58607
58608
58609
58610
58611
58612
58613
58614
58615
58616
58617
58618
58619
58620
58621
58622
58623
58624
58625
58626
58627
58628
58629
58630
58631
58632
58633
58634
58635
58636
58637
58638
58639
58640
58641
58642
58643
58644
58645
58646
58647
58648
58649
58650
58651
58652
58653
58654
58655
58656
# File 'ext/cntk/cntk_wrap.cxx', line 58568

// SWIG-generated overload dispatcher for Trainer#__train_minibatch__.
// Type-checks the receiver and arguments against each of the four generated
// overloads in order and forwards to the first match; note both argc == 3
// branches are tried in sequence (DeviceDescriptor vs. outputsToFetch map).
SWIGINTERN VALUE _wrap_Trainer___train_minibatch__(int nargs, VALUE *args, VALUE self) {
  int argc;
  VALUE argv[5];
  int ii;
  
  // argv[0] is the receiver, so the effective argument count is nargs + 1.
  argc = nargs + 1;
  argv[0] = self;
  if (argc > 5) SWIG_fail;
  for (ii = 1; (ii < argc); ++ii) {
    argv[ii] = args[ii-1];
  }
  // Candidate: __train_minibatch__(arguments).
  if (argc == 2) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,std::shared_ptr< CNTK::Value >,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,std::shared_ptr< CNTK::Value > > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        return _wrap_Trainer___train_minibatch____SWIG_1(nargs, args, self);
      }
    }
  }
  // Candidate: __train_minibatch__(arguments, computeDevice).
  if (argc == 3) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,std::shared_ptr< CNTK::Value >,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,std::shared_ptr< CNTK::Value > > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_CNTK__DeviceDescriptor, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          return _wrap_Trainer___train_minibatch____SWIG_0(nargs, args, self);
        }
      }
    }
  }
  // Candidate: __train_minibatch__(arguments, outputsToFetch).
  if (argc == 3) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,std::shared_ptr< CNTK::Value >,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,std::shared_ptr< CNTK::Value > > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_std__unordered_mapT_CNTK__Variable_std__shared_ptrT_CNTK__Value_t_std__hashT_CNTK__Variable_t_std__equal_toT_CNTK__Variable_t_std__allocatorT_std__pairT_CNTK__Variable_const_std__shared_ptrT_CNTK__Value_t_t_t_t, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          return _wrap_Trainer___train_minibatch____SWIG_3(nargs, args, self);
        }
      }
    }
  }
  // Candidate: __train_minibatch__(arguments, outputsToFetch, computeDevice).
  if (argc == 4) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,std::shared_ptr< CNTK::Value >,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,std::shared_ptr< CNTK::Value > > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_std__unordered_mapT_CNTK__Variable_std__shared_ptrT_CNTK__Value_t_std__hashT_CNTK__Variable_t_std__equal_toT_CNTK__Variable_t_std__allocatorT_std__pairT_CNTK__Variable_const_std__shared_ptrT_CNTK__Value_t_t_t_t, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          void *vptr = 0;
          int res = SWIG_ConvertPtr(argv[3], &vptr, SWIGTYPE_p_CNTK__DeviceDescriptor, 0);
          _v = SWIG_CheckState(res);
          if (_v) {
            return _wrap_Trainer___train_minibatch____SWIG_2(nargs, args, self);
          }
        }
      }
    }
  }
  
// No overload matched: raise a Ruby error listing the accepted signatures.
fail:
  Ruby_Format_OverloadedError( argc, 5, "Trainer.__train_minibatch__", 
    "    bool Trainer.__train_minibatch__(std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > const &arguments, CNTK::DeviceDescriptor const &computeDevice)\n"
    "    bool Trainer.__train_minibatch__(std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > const &arguments)\n"
    "    bool Trainer.__train_minibatch__(std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > const &arguments, std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > &outputsToFetch, CNTK::DeviceDescriptor const &computeDevice)\n"
    "    bool Trainer.__train_minibatch__(std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > const &arguments, std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > &outputsToFetch)\n");
  
  return Qnil;
}

#__train_minibatchdata__(*args, self) ⇒ Object



58306
58307
58308
58309
58310
58311
58312
58313
58314
58315
58316
58317
58318
58319
58320
58321
58322
58323
58324
58325
58326
58327
58328
58329
58330
58331
58332
58333
58334
58335
58336
58337
58338
58339
58340
58341
58342
58343
58344
58345
58346
58347
58348
58349
58350
58351
58352
58353
58354
58355
58356
58357
58358
58359
58360
58361
58362
58363
58364
58365
58366
58367
58368
58369
58370
58371
58372
58373
58374
58375
58376
58377
58378
58379
58380
58381
58382
58383
58384
58385
58386
58387
58388
58389
58390
58391
58392
58393
58394
# File 'ext/cntk/cntk_wrap.cxx', line 58306

// SWIG-generated overload dispatcher for Trainer#__train_minibatchdata__.
// Same dispatch pattern as __train_minibatch__, but the `arguments` map
// carries CNTK::MinibatchData values instead of ValuePtr. Forwards to the
// first of the four generated overloads whose argument types match.
SWIGINTERN VALUE _wrap_Trainer___train_minibatchdata__(int nargs, VALUE *args, VALUE self) {
  int argc;
  VALUE argv[5];
  int ii;
  
  // argv[0] is the receiver, so the effective argument count is nargs + 1.
  argc = nargs + 1;
  argv[0] = self;
  if (argc > 5) SWIG_fail;
  for (ii = 1; (ii < argc); ++ii) {
    argv[ii] = args[ii-1];
  }
  // Candidate: __train_minibatchdata__(arguments).
  if (argc == 2) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        return _wrap_Trainer___train_minibatchdata____SWIG_1(nargs, args, self);
      }
    }
  }
  // Candidate: __train_minibatchdata__(arguments, computeDevice).
  if (argc == 3) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_CNTK__DeviceDescriptor, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          return _wrap_Trainer___train_minibatchdata____SWIG_0(nargs, args, self);
        }
      }
    }
  }
  // Candidate: __train_minibatchdata__(arguments, outputsToFetch).
  if (argc == 3) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_std__unordered_mapT_CNTK__Variable_std__shared_ptrT_CNTK__Value_t_std__hashT_CNTK__Variable_t_std__equal_toT_CNTK__Variable_t_std__allocatorT_std__pairT_CNTK__Variable_const_std__shared_ptrT_CNTK__Value_t_t_t_t, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          return _wrap_Trainer___train_minibatchdata____SWIG_3(nargs, args, self);
        }
      }
    }
  }
  // Candidate: __train_minibatchdata__(arguments, outputsToFetch, computeDevice).
  if (argc == 4) {
    int _v;
    int res = SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0);
    _v = SWIG_CheckState(res);
    if (_v) {
      int res = swig::asptr(argv[1], (std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > >**)(0));
      _v = SWIG_CheckState(res);
      if (_v) {
        void *vptr = 0;
        int res = SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_std__unordered_mapT_CNTK__Variable_std__shared_ptrT_CNTK__Value_t_std__hashT_CNTK__Variable_t_std__equal_toT_CNTK__Variable_t_std__allocatorT_std__pairT_CNTK__Variable_const_std__shared_ptrT_CNTK__Value_t_t_t_t, 0);
        _v = SWIG_CheckState(res);
        if (_v) {
          void *vptr = 0;
          int res = SWIG_ConvertPtr(argv[3], &vptr, SWIGTYPE_p_CNTK__DeviceDescriptor, 0);
          _v = SWIG_CheckState(res);
          if (_v) {
            return _wrap_Trainer___train_minibatchdata____SWIG_2(nargs, args, self);
          }
        }
      }
    }
  }
  
// No overload matched: raise a Ruby error listing the accepted signatures.
fail:
  Ruby_Format_OverloadedError( argc, 5, "Trainer.__train_minibatchdata__", 
    "    bool Trainer.__train_minibatchdata__(std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > > const &arguments, CNTK::DeviceDescriptor const &computeDevice)\n"
    "    bool Trainer.__train_minibatchdata__(std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > > const &arguments)\n"
    "    bool Trainer.__train_minibatchdata__(std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > > const &arguments, std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > &outputsToFetch, CNTK::DeviceDescriptor const &computeDevice)\n"
    "    bool Trainer.__train_minibatchdata__(std::unordered_map< CNTK::Variable,CNTK::MinibatchData,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::MinibatchData > > > const &arguments, std::unordered_map< CNTK::Variable,CNTK::ValuePtr,std::hash< CNTK::Variable >,std::equal_to< CNTK::Variable >,std::allocator< std::pair< CNTK::Variable const,CNTK::ValuePtr > > > &outputsToFetch)\n");
  
  return Qnil;
}

#evaluation_function(*args) ⇒ Object



59436
59437
59438
59439
59440
59441
59442
59443
59444
59445
59446
59447
59448
59449
59450
59451
59452
59453
59454
59455
59456
59457
59458
59459
59460
59461
59462
59463
59464
59465
59466
59467
59468
59469
59470
59471
59472
59473
59474
59475
59476
59477
59478
59479
59480
59481
59482
59483
59484
59485
59486
59487
59488
59489
59490
# File 'ext/cntk/cntk_wrap.cxx', line 59436

// SWIG-generated wrapper for Trainer#evaluation_function.
// Unwraps the std::shared_ptr<CNTK::Trainer> held by `self`, calls
// EvaluationFunction(), and hands the resulting FunctionPtr back to Ruby
// as an owned shared_ptr<CNTK::Function>.
SWIGINTERN VALUE
_wrap_Trainer_evaluation_function(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer > tempshared1 ;
  std::shared_ptr< CNTK::Trainer > *smartarg1 = 0 ;
  CNTK::FunctionPtr result;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper. If the
    // cast allocated a fresh shared_ptr (SWIG_CAST_NEW_MEMORY), copy it into
    // a local and free the temporary; otherwise borrow the stored pointer.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer *","EvaluationFunction", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (arg1)->EvaluationFunction(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  {
    // Wrap the (possibly empty) FunctionPtr; Ruby owns the heap copy.
    std::shared_ptr<  CNTK::Function > *smartresult = result ? new std::shared_ptr<  CNTK::Function >(result) : 0;
    vresult = SWIG_NewPointerObj(SWIG_as_voidptr(smartresult), SWIGTYPE_p_std__shared_ptrT_CNTK__Function_t, SWIG_POINTER_OWN);
  }
  return vresult;
fail:
  return Qnil;
}

#loss_function(*args) ⇒ Object



59379
59380
59381
59382
59383
59384
59385
59386
59387
59388
59389
59390
59391
59392
59393
59394
59395
59396
59397
59398
59399
59400
59401
59402
59403
59404
59405
59406
59407
59408
59409
59410
59411
59412
59413
59414
59415
59416
59417
59418
59419
59420
59421
59422
59423
59424
59425
59426
59427
59428
59429
59430
59431
59432
59433
# File 'ext/cntk/cntk_wrap.cxx', line 59379

// SWIG-generated wrapper for Trainer#loss_function.
// Unwraps the std::shared_ptr<CNTK::Trainer> held by `self`, calls
// LossFunction(), and returns the resulting FunctionPtr to Ruby as an
// owned shared_ptr<CNTK::Function>.
SWIGINTERN VALUE
_wrap_Trainer_loss_function(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer > tempshared1 ;
  std::shared_ptr< CNTK::Trainer > *smartarg1 = 0 ;
  CNTK::FunctionPtr result;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer *","LossFunction", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (arg1)->LossFunction(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  {
    // Wrap the (possibly empty) FunctionPtr; Ruby owns the heap copy.
    std::shared_ptr<  CNTK::Function > *smartresult = result ? new std::shared_ptr<  CNTK::Function >(result) : 0;
    vresult = SWIG_NewPointerObj(SWIG_as_voidptr(smartresult), SWIGTYPE_p_std__shared_ptrT_CNTK__Function_t, SWIG_POINTER_OWN);
  }
  return vresult;
fail:
  return Qnil;
}

#model(*args) ⇒ Object



59322
59323
59324
59325
59326
59327
59328
59329
59330
59331
59332
59333
59334
59335
59336
59337
59338
59339
59340
59341
59342
59343
59344
59345
59346
59347
59348
59349
59350
59351
59352
59353
59354
59355
59356
59357
59358
59359
59360
59361
59362
59363
59364
59365
59366
59367
59368
59369
59370
59371
59372
59373
59374
59375
59376
# File 'ext/cntk/cntk_wrap.cxx', line 59322

// SWIG-generated wrapper for Trainer#model.
// Unwraps the std::shared_ptr<CNTK::Trainer> held by `self`, calls Model(),
// and returns the resulting FunctionPtr to Ruby as an owned
// shared_ptr<CNTK::Function>.
SWIGINTERN VALUE
_wrap_Trainer_model(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer > tempshared1 ;
  std::shared_ptr< CNTK::Trainer > *smartarg1 = 0 ;
  CNTK::FunctionPtr result;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer *","Model", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (arg1)->Model(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  {
    // Wrap the (possibly empty) FunctionPtr; Ruby owns the heap copy.
    std::shared_ptr<  CNTK::Function > *smartresult = result ? new std::shared_ptr<  CNTK::Function >(result) : 0;
    vresult = SWIG_NewPointerObj(SWIG_as_voidptr(smartresult), SWIGTYPE_p_std__shared_ptrT_CNTK__Function_t, SWIG_POINTER_OWN);
  }
  return vresult;
fail:
  return Qnil;
}

#parameter_learners(*args) ⇒ Object



59655
59656
59657
59658
59659
59660
59661
59662
59663
59664
59665
59666
59667
59668
59669
59670
59671
59672
59673
59674
59675
59676
59677
59678
59679
59680
59681
59682
59683
59684
59685
59686
59687
59688
59689
59690
59691
59692
59693
59694
59695
59696
59697
59698
59699
59700
59701
59702
59703
59704
59705
59706
# File 'ext/cntk/cntk_wrap.cxx', line 59655

// SWIG-generated wrapper for Trainer#parameter_learners.
// Unwraps the (const) std::shared_ptr<CNTK::Trainer> held by `self`, calls
// ParameterLearners(), and returns a NON-owning Ruby wrapper around the
// trainer's internal vector<LearnerPtr> (ownership flag 0, so Ruby will not
// free it; the reference stays valid only while the trainer lives).
SWIGINTERN VALUE
_wrap_Trainer_parameter_learners(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer const > tempshared1 ;
  std::shared_ptr< CNTK::Trainer const > *smartarg1 = 0 ;
  std::vector< CNTK::LearnerPtr,std::allocator< CNTK::LearnerPtr > > *result = 0 ;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer const *","ParameterLearners", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (std::vector< CNTK::LearnerPtr,std::allocator< CNTK::LearnerPtr > > *) &((CNTK::Trainer const *)arg1)->ParameterLearners(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  vresult = SWIG_NewPointerObj(SWIG_as_voidptr(result), SWIGTYPE_p_std__vectorT_std__shared_ptrT_CNTK__Learner_t_std__allocatorT_std__shared_ptrT_CNTK__Learner_t_t_t, 0 |  0 );
  return vresult;
fail:
  return Qnil;
}

#previous_minibatch_evaluation_average(*args) ⇒ Object



59547
59548
59549
59550
59551
59552
59553
59554
59555
59556
59557
59558
59559
59560
59561
59562
59563
59564
59565
59566
59567
59568
59569
59570
59571
59572
59573
59574
59575
59576
59577
59578
59579
59580
59581
59582
59583
59584
59585
59586
59587
59588
59589
59590
59591
59592
59593
59594
59595
59596
59597
59598
# File 'ext/cntk/cntk_wrap.cxx', line 59547

// SWIG-generated wrapper for Trainer#previous_minibatch_evaluation_average.
// Unwraps the (const) std::shared_ptr<CNTK::Trainer> held by `self`, calls
// PreviousMinibatchEvaluationAverage(), and returns the double as a Ruby
// Float.
SWIGINTERN VALUE
_wrap_Trainer_previous_minibatch_evaluation_average(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer const > tempshared1 ;
  std::shared_ptr< CNTK::Trainer const > *smartarg1 = 0 ;
  double result;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer const *","PreviousMinibatchEvaluationAverage", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (double)((CNTK::Trainer const *)arg1)->PreviousMinibatchEvaluationAverage(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  vresult = SWIG_From_double(static_cast< double >(result));
  return vresult;
fail:
  return Qnil;
}

#previous_minibatch_loss_average(*args) ⇒ Object



59493
59494
59495
59496
59497
59498
59499
59500
59501
59502
59503
59504
59505
59506
59507
59508
59509
59510
59511
59512
59513
59514
59515
59516
59517
59518
59519
59520
59521
59522
59523
59524
59525
59526
59527
59528
59529
59530
59531
59532
59533
59534
59535
59536
59537
59538
59539
59540
59541
59542
59543
59544
# File 'ext/cntk/cntk_wrap.cxx', line 59493

// SWIG-generated wrapper for Trainer#previous_minibatch_loss_average.
// Unwraps the (const) std::shared_ptr<CNTK::Trainer> held by `self`, calls
// PreviousMinibatchLossAverage(), and returns the double as a Ruby Float.
SWIGINTERN VALUE
_wrap_Trainer_previous_minibatch_loss_average(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer const > tempshared1 ;
  std::shared_ptr< CNTK::Trainer const > *smartarg1 = 0 ;
  double result;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer const *","PreviousMinibatchLossAverage", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (double)((CNTK::Trainer const *)arg1)->PreviousMinibatchLossAverage(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  vresult = SWIG_From_double(static_cast< double >(result));
  return vresult;
fail:
  return Qnil;
}

#previous_minibatch_sample_count(*args) ⇒ Object



59601
59602
59603
59604
59605
59606
59607
59608
59609
59610
59611
59612
59613
59614
59615
59616
59617
59618
59619
59620
59621
59622
59623
59624
59625
59626
59627
59628
59629
59630
59631
59632
59633
59634
59635
59636
59637
59638
59639
59640
59641
59642
59643
59644
59645
59646
59647
59648
59649
59650
59651
59652
# File 'ext/cntk/cntk_wrap.cxx', line 59601

// SWIG-generated wrapper for Trainer#previous_minibatch_sample_count.
// Unwraps the std::shared_ptr<CNTK::Trainer> held by `self`, calls
// PreviousMinibatchSampleCount(), and returns the size_t as a Ruby Integer.
SWIGINTERN VALUE
_wrap_Trainer_previous_minibatch_sample_count(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer > tempshared1 ;
  std::shared_ptr< CNTK::Trainer > *smartarg1 = 0 ;
  size_t result;
  VALUE vresult = Qnil;
  
  // The Ruby method takes no arguments.
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer *","PreviousMinibatchSampleCount", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (arg1)->PreviousMinibatchSampleCount(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  vresult = SWIG_From_size_t(static_cast< size_t >(result));
  return vresult;
fail:
  return Qnil;
}

#restore_from_checkpoint(*args) ⇒ Object



59253
59254
59255
59256
59257
59258
59259
59260
59261
59262
59263
59264
59265
59266
59267
59268
59269
59270
59271
59272
59273
59274
59275
59276
59277
59278
59279
59280
59281
59282
59283
59284
59285
59286
59287
59288
59289
59290
59291
59292
59293
59294
59295
59296
59297
59298
59299
59300
59301
59302
59303
59304
59305
59306
59307
59308
59309
59310
59311
59312
59313
59314
59315
59316
59317
59318
59319
# File 'ext/cntk/cntk_wrap.cxx', line 59253

// SWIG-generated wrapper for Trainer#restore_from_checkpoint.
// Takes one Ruby string (checkpoint file path, converted to std::wstring),
// calls RestoreFromCheckpoint on the unwrapped Trainer, and returns a
// Ruby-owned copy of the resulting CNTK::Dictionary (the external state
// saved alongside the checkpoint).
SWIGINTERN VALUE
_wrap_Trainer_restore_from_checkpoint(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  std::wstring *arg2 = 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer > tempshared1 ;
  std::shared_ptr< CNTK::Trainer > *smartarg1 = 0 ;
  int res2 = SWIG_OLDOBJ ;
  CNTK::Dictionary result;
  VALUE vresult = Qnil;
  
  // Exactly one argument (the checkpoint file path) is required.
  if ((argc < 1) || (argc > 1)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 1)",argc); SWIG_fail;
  }
  {
    // Extract the C++ Trainer from the Ruby smart-pointer wrapper; copy and
    // free a freshly-allocated shared_ptr, or borrow the stored one.
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer *","RestoreFromCheckpoint", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      tempshared1 = *reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      smartarg1 = reinterpret_cast< std::shared_ptr<  CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    // Convert the Ruby string argument into a std::wstring; res2 records
    // whether a new object was allocated (freed below in both exits).
    std::wstring *ptr = (std::wstring *)0;
    res2 = SWIG_AsPtr_std_wstring(argv[0], &ptr);
    if (!SWIG_IsOK(res2)) {
      SWIG_exception_fail(SWIG_ArgError(res2), Ruby_Format_TypeError( "", "std::wstring const &","RestoreFromCheckpoint", 2, argv[0] )); 
    }
    if (!ptr) {
      SWIG_exception_fail(SWIG_ValueError, Ruby_Format_TypeError("invalid null reference ", "std::wstring const &","RestoreFromCheckpoint", 2, argv[0])); 
    }
    arg2 = ptr;
  }
  {
    // Translate C++ exceptions from the library call into Ruby exceptions.
    try {
      result = (arg1)->RestoreFromCheckpoint((std::wstring const &)*arg2); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  // Heap-copy the Dictionary result; Ruby owns and later frees the copy.
  vresult = SWIG_NewPointerObj((new CNTK::Dictionary(static_cast< const CNTK::Dictionary& >(result))), SWIGTYPE_p_CNTK__Dictionary, SWIG_POINTER_OWN |  0 );
  if (SWIG_IsNewObj(res2)) delete arg2;
  return vresult;
fail:
  if (SWIG_IsNewObj(res2)) delete arg2;
  return Qnil;
}

#save_checkpoint(*args, self) ⇒ Object



59203
59204
59205
59206
59207
59208
59209
59210
59211
59212
59213
59214
59215
59216
59217
59218
59219
59220
59221
59222
59223
59224
59225
59226
59227
59228
59229
59230
59231
59232
59233
59234
59235
59236
59237
59238
59239
59240
59241
59242
59243
59244
59245
59246
59247
59248
59249
59250
# File 'ext/cntk/cntk_wrap.cxx', line 59203

SWIGINTERN VALUE _wrap_Trainer_save_checkpoint(int nargs, VALUE *args, VALUE self) {
  /* Overload dispatcher for Trainer#save_checkpoint: probes the Ruby
     argument types and forwards to the matching C++ overload —
     SWIG_1 for (filePath) or SWIG_0 for (filePath, externalState). */
  VALUE argv[4];
  int argc = nargs + 1;  /* self is prepended as argv[0] */
  int i;
  
  if (argc > 4) SWIG_fail;
  argv[0] = self;
  for (i = 1; i < argc; ++i) {
    argv[i] = args[i - 1];
  }
  /* save_checkpoint(filePath): receiver must be a Trainer shared_ptr,
     first Ruby argument convertible to std::wstring. */
  if (argc == 2) {
    if (SWIG_CheckState(SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0))
        && SWIG_CheckState(SWIG_AsPtr_std_wstring(argv[1], (std::wstring**)(0)))) {
      return _wrap_Trainer_save_checkpoint__SWIG_1(nargs, args, self);
    }
  }
  /* save_checkpoint(filePath, externalState): as above, plus a
     CNTK::Dictionary as the second Ruby argument. */
  if (argc == 3) {
    if (SWIG_CheckState(SWIG_ConvertPtr(argv[0], 0, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0))
        && SWIG_CheckState(SWIG_AsPtr_std_wstring(argv[1], (std::wstring**)(0)))) {
      void *vptr = 0;
      if (SWIG_CheckState(SWIG_ConvertPtr(argv[2], &vptr, SWIGTYPE_p_CNTK__Dictionary, 0))) {
        return _wrap_Trainer_save_checkpoint__SWIG_0(nargs, args, self);
      }
    }
  }
  
fail:
  /* No overload matched: raise a Ruby error listing both accepted signatures. */
  Ruby_Format_OverloadedError( argc, 4, "Trainer.save_checkpoint", 
    "    void Trainer.save_checkpoint(std::wstring const &filePath, CNTK::Dictionary externalState)\n"
    "    void Trainer.save_checkpoint(std::wstring const &filePath)\n");
  
  return Qnil;
}

#test_minibatch(*args, self) ⇒ Float

Parameters:

Returns:

  • (Float)


24
25
26
# File 'lib/cntk/trainer.rb', line 24

# Evaluates the model on one minibatch without updating parameters.
# Thin wrapper around the C-extension entry point +__test_minibatchdata__+.
#
# @param args   minibatch data passed straight through to the extension
#               (presumably a Hash of input Variable => MinibatchData — TODO confirm against the wrapper's overloads)
# @param device [DeviceDescriptor] device to evaluate on; defaults to the process default device
# @return whatever +__test_minibatchdata__+ returns (documented above as Float)
def test_minibatch(args, device: DeviceDescriptor.use_default_device)
  __test_minibatchdata__(args, device)
end

#total_number_of_samples_seen(*args) ⇒ Object



59709
59710
59711
59712
59713
59714
59715
59716
59717
59718
59719
59720
59721
59722
59723
59724
59725
59726
59727
59728
59729
59730
59731
59732
59733
59734
59735
59736
59737
59738
59739
59740
59741
59742
59743
59744
59745
59746
59747
59748
59749
59750
59751
59752
59753
59754
59755
59756
59757
59758
59759
59760
# File 'ext/cntk/cntk_wrap.cxx', line 59709

SWIGINTERN VALUE
/* Ruby binding for Trainer#total_number_of_samples_seen: calls the const
   C++ method TotalNumberOfSamplesSeen() and returns its size_t result as
   a Ruby Integer. Takes no Ruby-side arguments. */
_wrap_Trainer_total_number_of_samples_seen(int argc, VALUE *argv, VALUE self) {
  CNTK::Trainer *arg1 = (CNTK::Trainer *) 0 ;
  void *argp1 = 0 ;
  int res1 = 0 ;
  std::shared_ptr< CNTK::Trainer const > tempshared1 ;
  std::shared_ptr< CNTK::Trainer const > *smartarg1 = 0 ;
  size_t result;
  VALUE vresult = Qnil;
  
  /* Zero-argument method: reject any Ruby arguments. */
  if ((argc < 0) || (argc > 0)) {
    rb_raise(rb_eArgError, "wrong # of arguments(%d for 0)",argc); SWIG_fail;
  }
  {
    /* Unwrap the receiver (a shared_ptr<CNTK::Trainer>) into a raw pointer. */
    swig_ruby_owntype newmem = {
      0, 0
    };
    res1 = SWIG_ConvertPtrAndOwn(self, &argp1, SWIGTYPE_p_std__shared_ptrT_CNTK__Trainer_t, 0 |  0 , &newmem);
    if (!SWIG_IsOK(res1)) {
      SWIG_exception_fail(SWIG_ArgError(res1), Ruby_Format_TypeError( "", "CNTK::Trainer const *","TotalNumberOfSamplesSeen", 1, self ));
    }
    if (newmem.own & SWIG_CAST_NEW_MEMORY) {
      /* Conversion allocated a fresh shared_ptr: copy it into a local
         temporary (keeps the object alive for this call) and free the
         allocation before taking the raw pointer. */
      tempshared1 = *reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      delete reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >(tempshared1.get());
    } else {
      /* Borrow the caller-owned shared_ptr without taking ownership. */
      smartarg1 = reinterpret_cast< std::shared_ptr< const CNTK::Trainer > * >(argp1);
      arg1 = const_cast< CNTK::Trainer * >((smartarg1 ? smartarg1->get() : 0));
    }
  }
  {
    /* Invoke the C++ method, translating C++ exceptions to Ruby ones. */
    try {
      result = ((CNTK::Trainer const *)arg1)->TotalNumberOfSamplesSeen(); 
    }
    catch (const std::runtime_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (const std::invalid_argument &e) {
      SWIG_exception(SWIG_ValueError,e.what()); 
    }
    catch (const std::logic_error &e) {
      SWIG_exception(SWIG_RuntimeError,e.what()); 
    }
    catch (...) {
      SWIG_exception(SWIG_UnknownError,"Runtime exception"); 
    }
  }
  /* Box the size_t count as a Ruby Integer. */
  vresult = SWIG_From_size_t(static_cast< size_t >(result));
  return vresult;
fail:
  return Qnil;
}

#train_minibatch(args, outputs: nil, device: DeviceDescriptor.use_default_device) ⇒ Object

Parameters:



7
8
9
10
11
12
13
14
15
16
17
18
19
# File 'lib/cntk/trainer.rb', line 7

# Runs one training step on a minibatch via the C-extension entry point
# +__train_minibatchdata__+.
#
# @param args    minibatch data forwarded to the extension
# @param outputs [Array, nil] optional variables whose values should be
#                captured during the forward pass
# @param device  [DeviceDescriptor] device to train on
# @return the extension's update result; when +outputs+ is given, a
#         two-element array of [update_result, populated output map]
def train_minibatch(args, outputs: nil, device: DeviceDescriptor.use_default_device)
  return __train_minibatchdata__(args, device) unless outputs

  out_map = StdUMapVariableValue.new
  # By setting nullptr, Forward function implemented in C++ will allocate
  # Value object with required storage.
  outputs.each { |out_var| out_map.__set_nullptr__(out_var) }
  [__train_minibatchdata__(args, out_map, device), out_map]
end