Newsgroups: comp.ai.neural-nets
Path: cantaloupe.srv.cs.cmu.edu!rochester!udel-eecis!news.mathworks.com!newsfeed.internetmci.com!usenet.eel.ufl.edu!warwick!bham!sunserver1.aston.ac.uk!usenet
From: hazarikn@aston.ac.uk (N HAZARIKA)
Subject: JETNET-3.1
X-Nntp-Posting-Host: laplace.aston.ac.uk
Content-Type: text/plain; charset=us-ascii
Message-ID: <DvAy23.7ps@aston.ac.uk>
Sender: usenet@aston.ac.uk
Organization: Aston University, Neural Computing Research Group
X-Newsreader: knews 0.9.7
Mime-Version: 1.0
Date: Mon, 29 Jul 1996 11:18:03 GMT
Lines: 147

Hi,

Does anyone have experience using the neural net package JETNET-3.1?
I tried modifying the test routine JNTEST to solve the XOR problem using
two hidden units, but the results do not seem to come out right. I have
attached the driver routine below. Could some kind soul tell me what I am
doing wrong?

      PROGRAM MAIN
C...Driver: ask for the JETNET updating method and run the XOR
C...test deck with it.
      PARAMETER(MAXI=1000,MAXO=1000)
      COMMON /JNDAT1/ MSTJN(40),PARJN(40),MSTJM(20),PARJM(20),
     &                OIN(MAXI),OUT(MAXO),MXNDJM
      SAVE /JNDAT1/
      INTEGER METHOD
C...List-directed prompt and read of the method code.
      WRITE(*,*) 'METHOD'
      READ(*,*) METHOD
      CALL JNTDEC(METHOD)
      STOP
      END
C***********************************************************************

      SUBROUTINE JNTDEC(METHOD)
C...JetNet subroutine Test-DECk
C
C...Trains a 3-layered network (INDIM inputs, NHID hidden nodes,
C...one output) on the XOR problem using the updating algorithm
C...selected by METHOD, and every 10 epochs displays the current
C...network response to each of the four training patterns.
C
C...METHOD: integer code for the updating method (stored in MSTJN(5)).

      PARAMETER(MAXI=1000,MAXO=1000)

      COMMON /JNDAT1/ MSTJN(40),PARJN(40),MSTJM(20),PARJM(20),
     &                OIN(MAXI),OUT(MAXO),MXNDJM
      SAVE /JNDAT1/

C...NHID replaces the original name HIDDEN, which was implicitly
C...REAL under default typing; NHID is implicitly INTEGER.
      PARAMETER(INDIM=2,NHID=2,NTRAIN=4,NEPOCH=100)
      DIMENSION TIN(NTRAIN,INDIM),TOUT(NTRAIN)

      WRITE(MSTJN(6),600)
      WRITE(MSTJN(6),*)

C...Generate data: the four XOR patterns and their target outputs.
      WRITE(MSTJN(6),625)
      TIN(1,1)=1
      TIN(1,2)=0
      TOUT(1)=1

      TIN(2,1)=0
      TIN(2,2)=1
      TOUT(2)=1

      TIN(3,1)=1
      TIN(3,2)=1
      TOUT(3)=0

      TIN(4,1)=0
      TIN(4,2)=0
      TOUT(4)=0

C...Echo the patterns to check that the correct values are input.
      DO 100 IPAT=1,NTRAIN
         WRITE(MSTJN(6),*) (TIN(IPAT,I),I=1,INDIM), TOUT(IPAT)
100   CONTINUE
      WRITE(MSTJN(6),626)

C...Set network architecture: MSTJN(1)-layered network with
C...MSTJN(11) hidden nodes, MSTJN(12) output nodes and
C...MSTJN(10) inputs.
      MSTJN(1)=3
      MSTJN(10)=INDIM
      MSTJN(11)=NHID
      MSTJN(12)=1

C...Set sigmoid function:
      MSTJN(3)=1

C...Choose updating method
      MSTJN(5)=METHOD

C...Initialize network:
      CALL JNINIT

C...Set learning rate PARJN(1) and momentum PARJN(2).
      PARJN(1)=0.1
      PARJN(2)=0.9

C...Define the size of one epoch. Note that for batch training, the
C...number of patterns per update, MSTJN(2), must be set to the
C...total number of training patterns, and hence MSTJN(9), the
C...number of updates per epoch must be set to one.
      MSTJN(2)=NTRAIN
      MSTJN(9)=1

C...Other parameters keep their default values.

      WRITE(MSTJN(6),*)

C...Main loop over epochs:
      DO 300 IEPOCH=1,NEPOCH

C...Training loop:
        DO 310 IP=1,NTRAIN
          IPAT=IP

C...Put pattern into OIN:
          DO 320 I=1,MSTJN(10)
            OIN(I)=TIN(IPAT,I)
 320      CONTINUE
C...Put target output value into OUT:
          OUT(1)=TOUT(IPAT)

C...Invoke training algorithm:
          CALL JNTRAL
 310    CONTINUE

        IF(MOD(IEPOCH,10).EQ.0) THEN
C...Display performance. BUG FIX: the original printed OUT(J), but
C...with one output node only OUT(1) is ever written, and after the
C...training loop it holds the response to the LAST pattern only;
C...OUT(2..NTRAIN) were never set. To show the current response to
C...each pattern, feed it forward with JNTEST and print OUT(1).
          DO 988 J=1,NTRAIN
            DO 330 I=1,MSTJN(10)
              OIN(I)=TIN(J,I)
 330        CONTINUE
            CALL JNTEST
            WRITE(MSTJN(6),*)IEPOCH,(TIN(J,I),I=1,INDIM),
     $           TOUT(J),OUT(1)
 988      CONTINUE
        ENDIF

        IF(MOD(IEPOCH,10).EQ.0) PAUSE
 300  CONTINUE
C...BUG FIX: format 600 was referenced at the top of the routine
C...but never defined, which would not compile; supply a header.
600   FORMAT(15X,'JETNET XOR test deck')
625   FORMAT(15X,'Generating training and test patterns...')
626   FORMAT(15X,'...done generating data.')
      RETURN

C**** END OF JNTDEC ****************************************************
      END


Cheers,

-- 
Neep Hazarika	Neural Computing Research Group, CSAM
		Aston University, Aston Triangle, Birmingham B4 7ET, U.K. 
Phone:	+44-121-359-3611 Ext. 4652 (work)	+44-121-554-8247 
Fax: +44-121-333-6215 
e-mail: n.hazarika@aston.ac.uk or hazarikn@aston.ac.uk
