
Message-Id: <200301280550.h0S5ohlA017021@chac.its.uow.edu.au>
Date: Tue, 28 Jan 2003 16:55:43 +1000
From: Y Chen <yc12 AT uow DOT edu DOT au>
To: "djgpp AT delorie DOT com" <djgpp AT delorie DOT com>
Subject: scan() in c++
X-mailer: FoxMail 3.11 Release [cn]
Mime-Version: 1.0
Reply-To: djgpp AT delorie DOT com

Dear Sir,

I got the following errors (the source file is included at the bottom of this message):

LSTM1.cpp: In member function `virtual char TLSTM::LoadPar()':
LSTM1.cpp:36: no matching function for call to `std::basic_fstream<char, 
   std::char_traits<char> >::scan(const char[17])'
LSTM1.cpp: In member function `char TLSTM::WriteWeightFile(char*)':
LSTM1.cpp:45: using typedef-name `std::iostream' after `class'


How can I fix the "no matching function..." error and the "using typedef-name `std::iostream' after `class'" error? Thanks a lot for your help.
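
I think the scan() member was an extension of the old GNU iostream library that the new <fstream> no longer has, and that "class iostream" is rejected because std::iostream is only a typedef now, but I am not sure. The sketch below is what I have in mind; ScanForToken is just a helper name I made up, it needs an extra #include <string>, and I have not tested it:

#include <string>

// Read whitespace-separated words until the token is found
// (a possible replacement for the old pFile->scan("NbPredictNextIn:")).
static bool ScanForToken(fstream *s, const char *token)
{
  string word;
  while (*s >> word)
    if (word == token) return true;
  return false; // token not found or stream exhausted
}

char TLSTM::LoadPar() {
  if (TNeuralNetBase::LoadPar()) return 1;
  if (OpenFile(cParFilename, ios::in)) return 1;
  if (ReadComment()) return 1;
  if (!ScanForToken(pFile, "NbPredictNextIn:")) return 1;
  return CloseFile();
}

char TLSTM::WriteWeightFile(char *FileName) {
  if (OpenFile(FileName, ios::out)) return 1;
  pFile->precision(PRECISION);
  // std::iostream is a typedef, so it must not follow the "class" keyword;
  // dropping "class" from the cast is the only change here:
  WriteWeightStream(pFile, (void (*)(iostream *, double &))&d2s, true);
  return CloseFile();
}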

Regards,
Y Chen

 // LSTM.cpp

#include <iostream>
#include <cstdlib>
//#include <fstream>
using namespace std;

//#include <stdlib.h>
#include <stdio.h>
//#include <iostream.h>
#include <iomanip.h>
#include <new.h> // for memory error handling
//#include <ctype.h>
#include <unistd.h> // sleep 
//#include <termios.h> // struct termios
#include <fcntl.h>
#include <sys/param.h>
#include <math.h>
#include <errno.h>
#include <string.h>
#include <time.h>
#include <float.h> // To have max values for impossible inits.
#include "LSTM.h"


//// TLSTM


char TLSTM::LoadPar() {
  double NotUsed;
  if (TNeuralNetBase::LoadPar()) return 1;
  if (OpenFile(cParFilename, ios::in)) return 1;
  if (ReadComment()) return 1;
  // Scan for the starting point in the parameter file.
  //******
  pFile->scan("NbPredictNextIn:"); 
  return CloseFile();
}


char TLSTM::WriteWeightFile(char *FileName) {
  if (OpenFile(FileName, ios::out)) return 1;
  pFile->precision(PRECISION); 
  //******
  WriteWeightStream(pFile,(void (*)(class iostream *, double &))&d2s,true);
  return CloseFile();
}

///////////////////main
int main(int argc, char **argv)
{
  
}








// LSTM.h
#include <iostream.h>
#include <strstream.h>
#include "NeuralNetBase.h"

//// defines
// switch for all debugging info
//#define DEBUG
//#define DEBUG_ONLINE_TO_COUT
#undef PARFILE
#define PARFILE  "LSTM.par"
#define INNERSTATE_DUMP_LOGFILE "S_Dump.log"
#define FGGATE_Y_LOGFILE "Fg_y.log"
// Max number of MemoBlocks for a growing net.
#define MAX_MEMOBLOCKS 10
//#define NO_IN_OUT_SHORTCUTS
//#define CONNECT_GATES_TO_S
//#define CONNECT_TO_GATES
//#define CONNECT_GATES_OF_SAME_BLOCK
//#define USE_CELL_BIAS
//#define RECURRENT_OUTPUT
#define G_SIGMOID
//#define G_TANH
//#define G_RATIONAL_FUNC
//#define G_LINEAR
#define H_SIGMOID
//#define H_TANH
//#define H_RATIONAL_FUNC
//#define H_LINEAR
#define Yfg_SIGMOID 0
//#define Yfg_SHIFTEDPOLY
//#define RATIONAL_RECURSION
//#define USE_FORGET_GATES

//// classes in this header
class TLSTM;

////LSTM
class TLSTM : public TNeuralNetBase {
public:
  TLSTM() { strcpy(cParFilename, PARFILE); }
  ~TLSTM() {}
  virtual char Run();
private:
  void InitNet();
  virtual char LoadPar();
  void InitMemoBlocks(unsigned int BegBlock, unsigned int EndBlock);
  void DeleteNet();
  char AddMemoBlock(int BegSrcBlockNb, int EndSrcBlockNb, int MemoBlockSize);
  void AddMemoBlockToExistingBlocks(unsigned int BegBlock, 
				    unsigned int EndBlock);
  void ResetNet();
  // For testing and freezing set the blocks for which we need derivatives.
  void ForwardPass(struct TPatData &PatData,
		   unsigned int BegBlockDeriv, unsigned int EndBlockDeriv);
  void BackwardPass(unsigned int BegBlock, unsigned int EndBlock);
#ifdef UPDATE_WEIGHTS_AFTER_SEQ 
  void ExecuteWeightChanges(unsigned int BegBlock, unsigned int EndBlock);
#endif
  void PatStatistics(struct TPatData &PatData);
  void SeqStatistics(struct TPatData &PatData);
  void EpoStatistics(struct TPatData &PatData);
  unsigned int Test();
  char WriteWeightFile(char *FileName);
  // For display and fileIO.
  char WriteWeightStream(iostream *s, 
			 void(*f)(iostream *s, double &d),
			 bool WriteWhiteSpace);
  char LoadWeightFile(char *FileName);
  char DumpAll(char *FileName);
  void WriteNetStream(iostream *s, struct TPatData &PatData);
  void DisplayNet(struct TPatData &PatData);
  void DisplayWeights();
  char WriteOutLogFile(char *FileName);
  char Dump_Inner_States();
  char DumpSomeAlphas(); // debugging local learning.
  void TopoCheck();
  void SetTopoStatistics();
  char GetOnlineProblemStatistics();
  // Inline functions.
  // G().
#ifdef G_SIGMOID
  void G(double x,double &g) { 
    if(x>709) { x=709; } //cerr << Tri <<"- x>709 in G()\n"; g=2; return; }
    else if(x<-709){ x=-709; } //cerr << Tri <<"- x<709 in G()\n"; g=-2; return; }
    g=2*LOGC(x)-1; }
  void G_d(double g,double &gd) { gd=0.5*(1-g*g); }//gd=1-g*g*0.25; }
#endif
#ifdef G_TANH
  void G(double x,double &g) { g=tanh(x); }
 void G_d(double g,double &gd) { gd=(1-g*g); } // tanh_d
#endif
#ifdef G_RATIONAL_FUNC
  double g__frac;
  void G(double x,double &g) { g__frac=1/(1+fabs(x)); g=x*g__frac; }
  void G_d(double &gd) { gd=SQR(g__frac); }
#endif
#ifdef G_LINEAR
  void G(double x,double &g) { g=x; }
  void G_d(double &gd) { gd=1; }
#endif
  // H().
#ifdef H_SIGMOID
  void H(double s,double &h) { 
    if(s>709) { s=709; } //cerr << Tri <<"- s>709 in H()\n"; h=1; return; }
    else if(s<-709){ s=-709; }//cerr << Tri <<"- s<709 in H()\n"; h=-1; return; }
    h=2*LOGC(s)-1; }
  double H_d(double h) { return (0.5*(1-h*h)); }
#endif
#ifdef H_TANH
  void H(double s,double &h) { h=tanh(s); }
  double H_d(double h) { return (1-h*h); } // tanh_d
#endif
#ifdef H_RATIONAL_FUNC
  double h__frac;
  void H(double s,double &h) { h__frac=1/(1+fabs(s)); h=s*h__frac; }
  double H_d() { return (SQR(h__frac)); }
#endif
#ifdef H_LINEAR
  void H(double s,double &h) { h=s; }
  double H_d() { return 1; }
#endif
  // Yfg
#ifdef Yfg_SHIFTEDPOLY
  double Yfg__frac;
  void Yfg(double x,double &y) { Yfg__frac=1/(1+fabs(x)); y=x*Yfg__frac+1; }
  double Yfg_d(double y,double &df) { df=Yfg__frac*Yfg__frac; return df; }
#endif
#ifdef Yfg_SIGMOID
  void Yfg(double x,double &y) { 
    if(x>709) { x=709; }//cerr << Tri <<"- x>709 in Yfg()\n"; y=1; return; }
    else if(x<-709){ x=-709; }//cerr << Tri <<"- x<709 in Yfg()\n"; y=-1; return; }
    y= (1+Yfg_SIGMOID) * LOGC(x) - Yfg_SIGMOID; }// y=1/(1+EXP(-x)); }
  double Yfg_d(double y,double &df) {  //y*(1-y);
    df=1/(Yfg_SIGMOID+1)*(y+Yfg_SIGMOID)*(1-y); return df; }
#endif
  struct Ts_d_Gate { double s_d,alpha,h; }; // Declared here for the following functions.
  void NewTs_d_Gate(Ts_d_Gate *&sd, unsigned int size);
  void NewTs_d_Gate(Ts_d_Gate &sd); // For the biases.
  // Function for the derivative update.
#if defined(USE_FORGET_GATES) && defined(RATIONAL_RECURSION)
  double frac_abs_s_NegNet, frac_1_Sqr, frac_Sqr_s_NegNet;
#endif
  void Update_Cell_s_d(double &s_d,int iB, double y_t1) {
#ifdef USE_FORGET_GATES
#ifndef RATIONAL_RECURSION
    s_d *= MemoBlock[iB].FgGate.y;
#else
    s_d *= frac_1_Sqr;
#endif
#endif
    s_d += g_d__y_in*y_t1;
  }
  void Update_InGate_s_d(double &s_d, int iB, double y_t1) {
#ifdef USE_FORGET_GATES
#ifndef RATIONAL_RECURSION
    s_d *= MemoBlock[iB].FgGate.y;
#else
    s_d *= frac_1_Sqr;
#endif
#endif
    s_d += y_in_d__g*y_t1;
  }
#ifdef USE_FORGET_GATES
  void Update_FgGate_s_d(double &s_d, int iB, double y_t1) {
#ifndef RATIONAL_RECURSION
    s_d *= MemoBlock[iB].FgGate.y;
    s_d += y__fg_d__s_t1*y_t1;
#else
    s_d *= frac_1_Sqr;
    s_d += frac_1_Sqr * frac_Sqr_s_NegNet * y_t1;
#endif
  }
#endif
  // Fields.
  struct TCell {
    double net,g,s,s_t1,h,y,y_t1; // s_t1 for the forget gates.
    TWeight *w_In, **w_Cell, *w_InGate, *w_OutGate, w_Bias;   
    double *s_d_In, **s_d_Cell, *s_d_InGate, *s_d_OutGate, s_d_Bias;
    Ts_d_Gate  *s_d_InGate_In, *s_d_InGate_s, **s_d_InGate_Cell, 
      *s_d_InGate_InGate, *s_d_InGate_OutGate, s_d_InGate_Bias;
#ifdef USE_FORGET_GATES
    Ts_d_Gate  *s_d_FgGate_In,  *s_d_FgGate_s, **s_d_FgGate_Cell, 
      *s_d_FgGate_InGate, *s_d_FgGate_OutGate, s_d_FgGate_Bias;
#endif
    double e_unscaled,e;
  };
  double g_d,g_d__y_in,y_in_d__g,InGate_df,FgGate_df; // For calculations.
#ifdef USE_FORGET_GATES
  double y__fg_d__s_t1;
#endif
  struct TMemoBlock {
    unsigned int MemoBlockSize;
    // For not fully connected growing net.
    // InputBeginBlockNb<= b < InputEndBlockNb (if==-1 -> NbMemoBlocks).
    int BegSrcBlockNb, EndSrcBlockNb;
    struct {
      double net,y,y_t1;
      TWeight *w_In, *w_s, **w_Cell, *w_InGate, *w_OutGate, w_Bias; 
    } InGate;
    struct {
      double net,y,y_t1,e,df,delta;
      TWeight *w_In, *w_s, **w_Cell, *w_InGate, *w_OutGate, w_Bias; 
    } OutGate;
#ifdef USE_FORGET_GATES
    // No connections from the FgGate, so no y_t1 and also no self connection.
    struct {
      double net,y;//,y_t1;
#ifdef RATIONAL_RECURSION
      double exp_NegNet;
#endif
      TWeight *w_In, *w_s, **w_Cell, *w_InGate, *w_OutGate, w_Bias; 
    } FgGate;
#endif
    TCell *Cell;
  } *MemoBlock;
  struct TOut { 
    double net,y,e,df,delta;
    TWeight *w_In, **w_Cell, *w_Hidden, w_Bias;
  } *Out;
  struct THidden { // BP units.
    double net,y,e,df,delta;
    TWeight *w_In, **w_Cell, w_Bias;
  } *Hidden;
  struct TPredictNextIn { // BP units.
    double net,y,e,df,delta;
    TWeight *w_In, *w_InClass, **w_Cell;
  } *PredictNextIn;
  struct TNextIn { // Out units for PredictNextIn.
    double net,y,e,df,delta; 
    TWeight *w_PredictNextIn;
  } *NextIn;
  struct TPredictClass { // BP units.
    double net,y,e,df,delta;
    TWeight **w_Cell;
  } *PredictClass;
  struct TPredictClassOut { // Out units for PredictClass.
    double net,y,e,df,delta;
    TWeight *w_PredictClass;
  } *PredictClassOut;
  // Parameter for topology.
  unsigned int NbPredictNextIn;
  unsigned int NbPredictClass;
  unsigned int NbPredictOut; // To calculate MSE.
  // Use cell-cell, gate-to-cell  connection inside block.
  bool InternalBlockConect;
  unsigned int NbMemoBlocks; // The block size is stored in TMemoBlock.
  unsigned int FreezeEndBlock; // Lower Block number are frozen.
  // LSTM statistics.
  unsigned int NbCells, NbMemUnits;
  // debugging.
  //double ddd;
};






//NeuralNetBase.h
#include "PatternManagement.h"
#include "MathMacros.h"

//// defines
// Output file names. PARFILE should be redefined.
#define PARFILE  "NNB.par"
#define ERRORLOGFILE  "Error.log"
#define TEST_ERRORLOGFILE  "TestError.log"
#define MSELOGFILE  "MSE.log"
#define TEST_MSELOGFILE  "TestMSE.log"
#define GROWLOGFILE  "Grow.log"
#define WEIGHTLOGFILE  "Weight.log" 
#define DUMP_FILE  "dump" 
#define WEIGHTMEAN_LOGFILE  "WeightMean.log" 
#define ALPHAMEAN_LOGFILE  "AlphaMean.log"
#define OUT_LOGFILE  "Out.log"
#define ONLINEPATTICKS_LOGFILE "OnlinePatTicks.log"
// Algorithm.
//#define USE_LOCAL_ALPHA
//#define USE_LOCAL_ALPHA_SEQUENCEWISE
//#define MOMENTUM 0.99
//#define USE_WEIGHT_DECAY
//#define RESTRICT_WEIGHT_RANGE 100000
//#define USE_ALPHA_DECAY_IN_SEQ 5
//#define ALPHA_DECAY_IN_SEQ_LINEAR
//#define DO_WEIGHT_STATISTICS
//#define UPDATE_WEIGHTS_AFTER_SEQ
//#define DO_ONLINE_PAT_PRODUCTION 
// For TempOrder, TempXOR task:
//#define NO_RESET_AFTER_ONLINE_SEQ
// For Timer task:
//#define SETSTEPTARGET_BUT_COUNT_SEQUENCEWISE
//#define ONLINE_PAT_FUNCNAME Online_ReberGrammar
//#define ONLINE_PAT_FUNCNAME Online_SlidingNBitMemory
//#define ONLINE_PAT_FUNCNAME Online_TempOrderEx6
//#define ONLINE_PAT_FUNCNAME Online_AddMulti
#define ONLINE_PAT_FUNCNAME Online_Timer
#define PRECISION 10
//#define BUFFER_EPO_LOG
//#define LOG_LIKELIHOOD_GATES
//#define LOG_LIKELIHOOD_CELLS
//#define LOG_LIKELIHOOD_OUTPUT
//#define LINEAR_OUTPUT
//#define USE_ERROR_SPLIT

//// classes in this header
class TEpoLogBuf;
class TNeuralNetBase;

////TEpoLogBuf
// Buffer for the epo log files.
class TEpoLogBuf {
public:
  TEpoLogBuf();
  ~TEpoLogBuf();
  void Init(unsigned int MaxBuf, unsigned int aAverageBuf);
  void Reset();
  void AddValue(unsigned int iEpo, double aVal);
  unsigned int Max, NbVal, *BufEpo; 
  double *BufVal, *BufMin, *BufMax, ValMin, ValMax; 
  unsigned int AverageBuf, AverageCount;
};

////NeuralNetBase
class TNeuralNetBase : public TPatternGenerator {
public:
  TNeuralNetBase();
  ~TNeuralNetBase() {}
  virtual char Run();
protected:
  char Init();
  virtual char LoadPar();
  char WriteLogfileEpo(char *FileName, double val);
  char WriteLogfileEpo(char *FileName, double val0, double val1);
  char FlushEpoLogBuf(char *FileName, TEpoLogBuf &EpoLogBuf);
  char WriteOnlinePatTicks(char *FileName, double val);
  void KeyCheck();
  // Inline functions.
  void log_sig(double x,double &y) { 
    if(x>709){x=709;}//cerr << Tri <<"- x>709 in log_sig()\n"; y=1; return;}
    else if(x<-709){x=-709;}//cerr << Tri <<"- x<709 in log_sig()\n"; y=-1; return; }
    y=LOGC(x); }//y=1/(1+EXP(-x)); }
  double log_sig_d(double y,double &df) { df=y*(1-y); return df; }
  double log_sig_d(double y) { return(y*(1-y)); }
  double tanh_d(double y,double &df) { df=1-y*y; return df; }
  double tanh_d(double y) { return (1-y*y); }
  //{df=EXP(y); df=df/(1+df)*(1+df); return df;}
  //{ df=y*(1-y); return df; }
  // Parameter
  char cParFilename[101];
  unsigned int NbTrials;
  long RandomSeed;
  unsigned int MaxEpochs, EpochsAfterLearned, TestEpochs;
  unsigned int TestEach_Epo, TestMaxEach_Epo, LastTestEpo; // 0 -> no tests.
  double Alpha, AlphaBase, AlphaError, AlphaPredict, AlphaDecayInSeq;
  double InitWeightRange, OutUnitBiasInitWeight, HiddenUnitBiasInitWeight;
  bool GrowNet, GrowFullyNotCascade, GrowFirstOrder, FreezeAndGrow;
  bool SetStepTarget;
  // Parameter for topology.
  unsigned int NbIn, NbOut, NbHidden;
  // Parameters for data:
  // to generate the patterns instead of loading them from a file.
  bool MixTrainSeqs, Generate_Pat;
  unsigned int Generate_Pat_Each_Epoch;
  unsigned int Generate_Pat_NbLoopAllSeqs_Test;
  unsigned int Generate_Pat_NbLoopAllSeqs_Train;
  char Pat_FuncName[256];
  // Variables for statistics.
  unsigned int PatWrong, ClassesWrong, TestClassesWrong;
  double PatCorrect, PatCorrectTest; // For continuous prediction.
  double MaxPatCorrectTrain; // To test only when improved.
  bool StopLern; 
  double ClassesWrongRel; 
  unsigned int ClassNb, WinnerNb;
  unsigned int LogErrRecSize, PartLogErrRecSize, NbLogedErr;
  double LogErrRecMean, PartLogErrRecMean;
  double MSEPat, MSESeq, MSEEpo, MSEStop, MaxMSE, TestMSEEpo;
  double WeightMean, AlphaMean, AlphaStd;
  double NbWeights;
  // Debug variables.
  double WeightsCount;// WPC;
  bool OutputDebug;
  // Global working variables (same as ).
  unsigned int Tri, Epo, TestEpo, Seq, SeqOnline, Pat, Val;
  unsigned int SeqOnlinePatCount; // For Dump_Inner_States numbering.
  bool FreezeSeqLoop; // For sequencewise online pat generation.
  // Variables for display and user interaction.
  char cKey;
  char cBuf[256];
  char cPrecision, cWidth;
  // Buffer
#ifdef BUFFER_EPO_LOG
  TEpoLogBuf TrainMSEEpoLogBuf, TrainErrEpoLogBuf, 
    TestErrEpoLogBuf, TestMSEEpoLogBuf;
#endif
  unsigned int AverageTrainBuf; // Always defined, for the .par file.
  // Defines for the local learning rate.
  double Alpha_ROH, Alpha_MUE;
  struct TWeight { double w;
#ifdef UPDATE_WEIGHTS_AFTER_SEQ
    double Dw;
#endif
#ifdef MOMENTUM
    double Dw_t1;
#endif    
    double alpha,h; }; // For Nic's K1.
  void NewWeight(TWeight *&pW, unsigned int size);
  void NewWeight(TWeight *&pW, unsigned int size, double InitWeight);
  void NewWeight(TWeight &W, double InitWeight);
  void NewWeight(double *&pW, unsigned int size);
  void AdjustAlphaAndWeights(double delta,double  df,
			     double yt1,TWeight &W) {
    double dw;
#ifndef USE_LOCAL_ALPHA
    dw = W.alpha * delta * yt1;
#ifdef MOMENTUM
    dw += MOMENTUM * W.Dw_t1; W.Dw_t1 = dw;
#endif    
#ifdef USE_ALPHA_DECAY_IN_SEQ
    dw *= AlphaDecayInSeq;
#endif
#ifdef USE_WEIGHT_DECAY
    // Weight decay.
    if(((W.w>0)&&(dw>0))||((W.w<0)&&(dw<0))) dw *=0.9;
#endif
#ifdef UPDATE_WEIGHTS_AFTER_SEQ
    W.Dw += dw;
#else
    W.w += dw;
#endif
    //W.w *= 0.999;
#else
    double delta_yt1 = delta*yt1;
    double df_yt1 = df*yt1;
    // Calc alpha.
    double bracket = 1+Alpha_MUE*W.h*delta_yt1; 
    if(bracket<Alpha_ROH) bracket=Alpha_ROH;
    W.alpha *= bracket;
    // Update w.
    dw = W.alpha * delta_yt1; W.w += dw;
    // Calc h.
    double k = W.alpha * df_yt1;
    bracket = 1 - k * df_yt1; if(bracket<0) bracket=0;
    W.h = W.h * bracket + dw;
#endif
#ifdef DO_WEIGHT_STATISTICS
    // Some global statistics.
    WeightMean += fabs(W.w);
    //AlphaMean += W.alpha; AlphaStd += SQR(W.alpha); //WPC++;
#endif
#ifdef RESTRICT_WEIGHT_RANGE
    if(fabs(W.w)>RESTRICT_WEIGHT_RANGE) W.w=RESTRICT_WEIGHT_RANGE;
#endif
  }
  void RTRLAdjustAlphaAndWeights(double s_d,double e,double &alpha,
				 double &h,TWeight &W) {
    double dw;
#ifndef USE_LOCAL_ALPHA
    dw = alpha * s_d * e;
#ifdef MOMENTUM
    dw += MOMENTUM * W.Dw_t1; W.Dw_t1 = dw;
#endif    
#ifdef USE_ALPHA_DECAY_IN_SEQ
    dw *= AlphaDecayInSeq;
#endif
#ifdef USE_WEIGHT_DECAY
    // Weight decay.
    if(((W.w>0)&&(dw>0))||((W.w<0)&&(dw<0))) dw *=0.9;
#endif
#ifdef UPDATE_WEIGHTS_AFTER_SEQ
    W.Dw += dw;
#else
    W.w += dw;
#endif
#else
    // If USE_LOCAL_ALPHA. Calc alpha.
    double bracket = 1+Alpha_MUE*h*e*s_d;
    if(bracket<Alpha_ROH) bracket=Alpha_ROH;
    alpha *= bracket;
    // Update w.
    double k = alpha * s_d;
    dw = e * k; W.w += dw;
    // Calc h.
    bracket = 1 - k * s_d; if(bracket<0) bracket=0;
    h = h * bracket + dw;
#endif
#ifdef DO_WEIGHT_STATISTICS
    // Some global statistics.
    WeightMean += fabs(W.w);
    //AlphaMean += W.alpha; AlphaStd += SQR(W.alpha); //WPC++;
#endif
#ifdef RESTRICT_WEIGHT_RANGE
    if(fabs(W.w)>RESTRICT_WEIGHT_RANGE) W.w=RESTRICT_WEIGHT_RANGE;
#endif
  }
#ifdef UPDATE_WEIGHTS_AFTER_SEQ  
  void ExecuteWeightChange(TWeight &W) { W.w += W.Dw; W.Dw=0; }
#endif
};








//PatternManagement.h

#include <ctype.h>
#include <time.h>
#include "IOBase.h"

/*** defines ***/

// switch for all debugging info
//#define DEBUG
// file names
#define TRAINPATFILE "Train.pat"
#define TESTPATFILE  "Test.pat"
#define RESULTTRAINPATFILE "ResultTrain.pat"
#define RESULTTESTPATFILE "ResultTest.pat"

/*** classes in this header ***/
class TPatternManager;
class TPatternGenerator;

///////////////////TPatternManager
// A pattern holds the input and output values for one time step,
// a sequence is a list of patterns, and TPatternManager manages a list
// of sequences. The number of inputs and outputs is constant for all
// patterns in all sequences and has to match the number of in- and
// output units in the net.
// In the pattern file, sequences are separated by "\n\n", patterns by "\n",
// and in/out values by "\t" or ' '. There are no global parameters, and the
// number of values per pattern must be constant for the whole file.
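// A file with two sequences of two patterns each and four values per
// pattern might then look like this (made-up numbers, just to illustrate;
// the empty line separates the two sequences):
//
//   0.1  0.0  1.0  0.0
//   0.2  1.0  0.0  0.0
//
//   0.3  0.0  0.0  1.0
//   0.4  1.0  0.0  0.0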
class TPatternManager : public TIOBase {
public:
  TPatternManager();
  ~TPatternManager();
  char cTrainPatFilename[101]; // training pattern
  char cTestPatFilename[101];  // test pattern
  char cTrainPatFilenameResult[101]; // mixed training pattern
  char cTestPatFilenameResult[101]; // mixed training pattern
  char LoadTrainPattern();
  char LoadTestPattern();
  // One class one Output classification.
  char InitPatternClassification(unsigned int NbIn, unsigned int NbOut);
  char SaveTrainPattern(); // for debugging and Id'tk
  char SaveTestPattern(); // for debugging and Id'tk
  void MixTrainSequences();
  // The PatData is public to give the UI direct access for faster display.
  struct TPatData { // at least one for train and one for test
    unsigned int  NbSeq; 
    unsigned int* NbPat; // number of pattern in each sequence[NbSeq]
    unsigned int  NbPatTotal;
    unsigned int* NbClass; // classification of sequence[NbSeq]
    unsigned int  NbVal; // number of values in each pattern
    double*** SeqList; //[NbSeq][NbPat[NbSeq]][NbVal] set NULL in constructor
  } sTrainData, sTestData;
  unsigned int iVal, iPat, iSeq; // i for pattern management (pub to see).
protected:
  void CountNbPatTotal(struct TPatData &apsPatData);
  // If *NbPat == NULL, take the constant length.
  char NewPatData(struct TPatData &apsPatData, unsigned int ConstNbPat);
  // For destructor and to reload pattern.
  void DeletePattern(struct TPatData &apsPatData);
  char InitPatternClassification(struct TPatData &apsPatData,
				 unsigned int NbIn, 
				 unsigned int NbOut);  
  void CopyPatData(struct TPatData &apsPatDataDst, 
		   struct TPatData &apsPatDataSrc);
  void MixSequences(struct TPatData &apsPatData);
  void TransPat2StepByStepPredictALL(unsigned int NbInUnits,
				     unsigned int NbOutUnits);
  void TransPat2StepByStepClassiALL(unsigned int NbInUnits,
				    unsigned int NbOutUnits);
  void TransPat2UseContextNet(unsigned int NbInUnits,
			      unsigned int NbOutUnits) {}
  char SavePatternFileSeppFormat(struct TPatData &apsPatData, 
				 char* acPatFilename, 
				 unsigned int NbInUnits);
private:
  char LoadPattern(struct TPatData &apsPatData, char* acPatFilename);
  char SavePatternFile(struct TPatData &apsPatData, char* acPatFilename);
  void TransPat2StepByStepPredict(unsigned int NbInUnits,
			     unsigned int NbOutUnits,
			     struct TPatData &apsPatData);
  void TransPat2StepByStepClassi(unsigned int NbInUnits,
			     unsigned int NbOutUnits,
			     struct TPatData &apsPatData);
  unsigned int  NbValTemp; // to confirm file consistency
  char cScanned; // for scanning
  float f; // for scanning
  unsigned int i,ii,iii,iv; // for loops
};

///////////////////TPatternGenerator
// Collection of problems. Replaces TPatternManager for applications
// that need data generation.
class TPatternGenerator : public TPatternManager {
public:
  TPatternGenerator() { NbLoopAllSeqs = 1; RefData=NULL; ForONLINE = false; }
  ~TPatternGenerator() {}
  void Init_Rand_Seed();
  void GeneratePattern(struct TPatData &apsPatData, const char* FunktionName);
  void Set_LoopAllSeqs(unsigned int ui) { NbLoopAllSeqs = ui; }
  // e.g. set the TrainData as reference to generate TestData (NULL to unset)
  void Set_RefData(struct TPatData &apsPatData) { RefData=&apsPatData; }
protected:
  void New_OnlinePatData(struct TPatData &apsPatData, unsigned int MaxNbPat);
  // For Online pat by pat.
  void PatByPat_Reber(double out); // works on the train data.
  // We set the sizes of the PatData in the functions that generate them.
  void LongVeryNoisyTimesSeries(struct TPatData &apsPatData);
  void TempOrderEx6(struct TPatData &apsPatData);
  void TempXOR(struct TPatData &apsPatData);
  void Adding(struct TPatData &apsPatData);
  void Laufband(struct TPatData &apsPatData);
  void EmbeddedReberGrammar(struct TPatData &apsPatData);
  void Sinus(struct TPatData &apsPatData);
  void SpaceToTime(struct TPatData &apsPatData);
  // Online problems that generate one pattern or sequence at a time. 
  unsigned int MaxOnlineStreamLengthTrain,
    MaxOnlineStreamLengthTest; // Target for online pattern generation.
  unsigned int PatCount; // For online pattern generation statistics.
  void Online_TempOrderEx6(struct TPatData &apsPatData,
			   bool SetBackToSeqStart);
  void Online_ReberGrammar(struct TPatData &apsPatData,
			   bool SetBackToSeqStart);
  // For on-line production, protected to reset the net.
  bool ForONLINE; // If online method uses (calls) generation method.
  char ReberGrammarState; 
  char EmbeddedReberGrammarState; 
  void Online_SlidingNBitMemory(struct TPatData &apsPatData,
				bool SetBackToSeqStart);
  void Online_AddMulti(struct TPatData &apsPatData,
				bool SetBackToSeqStart);
  void Online_Timer(struct TPatData &apsPatData,
				bool SetBackToSeqStart);
  double AddMultiVal, AddMultiLastVal;
#define SlidingNBitMemorySize 4
  double SlidingNBitMemory[SlidingNBitMemorySize];
  char SlidingNBitMemoryPointer;
  // variables protected to log the Temp order (Exp 6).
  int t1,t2,t3,t4,tick;
  int XNotYt1,XNotYt2,XNotYt3,XNotYt4; 
private:
  unsigned int NbLoopAllSeqs;
  struct TPatData *RefData;
  // The next possible outputs.
  enum RberGrammarSymbol {B,T,P,S,X,V,E} RberGrammarSymbol_p1;
  unsigned int i,ii,iii;// for loops
};






// MathMacros.h
#include <math.h>
//#include <machine/endian.h>

#define SQR(x) ((x) * (x))

// macro for fast exp
static union {
  double d;
  struct {
    // machine dependent storage of multi-byte quantities.
#ifdef LITTLE_ENDIAN
    int j, i;
#else
    int i, j;
#endif
  } n;
} _eco, _eco2;
 
#define EXP(y) ((_eco.n.i = (int)(1512775*(y)) + 1072632448,_eco.n.j=0),\
 _eco.d)
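// (This looks like the Schraudolph-style fast-exp trick: the integer
//  expression is written directly into the upper 32 bits of the double,
//  i.e. into its sign/exponent/upper-mantissa bits; 1512775 is roughly
//  2^20/ln(2) and 1072632448 is roughly 1023*2^20 minus a small correction,
//  so _eco.d comes out close to exp(y). It is an approximation only.)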


/* logistic: logc(y) = EXP(y/2)/(EXP(y/2) + EXP(-y/2)) */      

/* auxiliary constants */

#define POW_2_20 (1 << 20) 
#define POW_1023 (1023*POW_2_20)
#define POW_2045 (2045*POW_2_20)

#define EXP_A (POW_2_20/M_LN2)  /* use 1512775 for int */

#define LOGC(y) (_eco2.n.i = (int)(POW_1023 - (EXP_A/2)*(y)), \
    _eco.n.i = (POW_2045 + POW_2_20) - _eco2.n.i, _eco.d/(_eco.d + _eco2.d))






 

//IOBase.h
#include <fstream.h>

/*** defines ***/
// switch for all debugging info
//#define DEBUG

/*** classes in this header ***/
class TIOBase; 

///////////////////TIOBase
// A container class for trivial common variables and functions.
// Handles the file IO.
class TIOBase
{
public:
  // We leave the standard constructor empty and do all the initialization
  // in the other one. The empty one is necessary for TTopoCreator and
  // TPatternManager, because their constructors are empty too. But as their
  // classes exist only virtually for TRNN, the empty constructor of
  // TIOBase will never be called, so no one will notice that it doesn't
  // do its work properly.
  TIOBase();
  ~TIOBase();
protected:
  // Possible Mode parameters are defined in class ios (ios::out, ios::in, ...).
  // The children are responsible for closing and deleting the file again.
  char OpenFile(char* acFilename, int aiMode);
  char OpenFile(fstream *apFile, char* acFilename, int aiMode);
  char CloseFile();
  char CloseFile(fstream *apFile);
  char ReadComment();
  bool DirContainsFile(bool SubSearch, char *fn);
  int Keypressed(char &ac);
  // Double to stream and the other way round.
  static void d2s(iostream *s, double &d) { *s << d; }
  static void s2d(iostream *s, double &d) { *s >> d; }
  // For long doubles.
  static void ld2s(iostream *s, long double &d) {
    double dd; dd=(double)d; *s << dd; } 
  static void s2ld(iostream *s, long double &d) {
    double dd; *s >> dd; d=dd; } 
  // We keep only one fstream object alive and open/close the files
  // instead of new/deleting an fstream object every time we read a file.
  fstream *pFile; // common stream pointer for all children
  long lFilePos; // to rescan the file beginning after the global section
  char c;  // to read comments
private:
  char cFilename[101];
  int iFileRdState; // the status of the file.
};


