Newsgroups: comp.ai.neural-nets
Path: cantaloupe.srv.cs.cmu.edu!das-news2.harvard.edu!news2.near.net!news.mathworks.com!uhog.mit.edu!news.mtholyoke.edu!news.umass.edu!umassd.edu!umassd!s_vogt
From: s_vogt@umassd.cis.umassd.edu (Stefan Vogt)
Subject: Re: Request: Software Engineering & ANN
Message-ID: <D6r4FK.69r@umassd.edu>
Sender: usenet@umassd.edu (USENET News System)
Organization: University of Massachusetts Dartmouth
X-Newsreader: TIN [version 1.2 PL2]
References: <D6MnCn.2KH@wst.edvz.sbg.ac.at>
Date: Sun, 9 Apr 1995 04:57:17 GMT
Lines: 574


Roland Schwaiger (rschwaig@wst.edvz.sbg.ac.at) wrote:
: Hello!
:  
: I am looking for literature concerning the
: modeling of ANN from a software engineering
: point of view. Can anybody give me some hints?
Well, my master's thesis deals with that issue, and it took me some
time to find anything. I have copied my BibTeX file below.
  
: I will summarize for the group!
Nice!

If you like to, we can stay in contact. My thesis will hopefully be
finished this semester. The first draft of a comparison of existing
development methods for ANN is ready now. 

Bye, Stefan
                                           -----------------------------
                                           "Good news is just life's way
                                            of keeping you off balance."


!#@$%#^$%^*&^%*$%^&(+~$#^@&)*%&%^%#@^*)^#@!$&*(?<|+),&%^$@$(_*_-\|?,<#@^%

Stefan Vogt                            http://www.cis.umassd.edu/~s_vogt/
Cedar Dell 5357                        August-Knabe-Weg 4
UMass Dartmouth                        59494 Soest
North Dartmouth, MA 02747-2300         Germany
+ (508)-990-9190                       +49 (2921)-62244
s_vogt@cis.umassd.edu                  s_vogt@ira.uka.de

--------------------------------------------------------------------------

The comments in the abstract fields are my opinion. I apologize in
advance for any impoliteness ;-))

%development methods
@article{BaT90a,
  author   = {Bailey, David L. and Thompson, Donna M.},
  title    = {How to develop neural-network applications},
  journal  = {AI Expert},
  year     = 1990,
  volume   = 5,
  number   = 6,
  pages    = {38--47},
  month    = jun,
  abstract = {Lifecycle related development model. This article
              covers the concept and design phase. In
              \cite{BaT90b} following phases are described. The
              authors give an overview of design parameters and a
              bottom-up method how to choose them.}
}
@article{BaT90b,
  author   = {Bailey, David L. and Thompson, Donna M.},
  title    = {Developing neural-network applications},
  journal  = {AI Expert},
  year     = 1990,
  volume   = 5,
  number   = 9,
  pages    = {34--41},
  month    = sep,
  abstract = {READ}
}
@inbook{KaN93,
  author    = {Karayiannis, Nicolaos B. and Venetsanopoulos, Anastasios N.},
  title     = {Artificial neural networks: Learning algorithms,
               performance evaluation, and applications},
  chapter   = {8.2},
  publisher = {Kluwer Academic},
  year      = 1993,
  pages     = {300--315},
  address   = {Norwell, MA},
  abstract  = {This chapter deals with a method how to develop in
               practice an ANN prototype. It is far away from a
               life cycle.}
}
@book{MeL94,
  author    = {Larry Medsker and Jay Liebowitz},
  title     = {Design and development of expert systems and neural
               networks},
  publisher = {Macmillan College Publishing Company},
  address   = {New York},
  year      = 1994,
  abstract  = {Gives similar methodologies for expert and ANN
               system design. Covers problems of both
               approaches. The hybrid system is suggested and
               described.}
}
@book{NeI91,
  author    = {Marilyn McCord Nelson and W.~T.~Illingworth},
  title     = {A practical guide to neural nets},
  publisher = AW,
  address   = AWadr,
  year      = 1991,
  note      = {book with disk: ISBN 0-201-52376-0; only book: ISBN
               0-201-56309-6},
  abstract  = {Practical introduction in ANN. Gives examples and
               hints for designing ANN. Also discusses the world,
               important people and firms in the field. The
               suggested design method is like something from scratch.}
}
@inproceedings{WhS92,
  author    = {Whittington, G. and Spracklen, C. T.},
  title     = {A structured design, development and integration
               methodology for real-world applications of
               artificial neural networks},
  editor    = {Igor Aleksander and John Taylor},
  volume    = 2,
  pages     = {1245--1252},
  booktitle = {Artificial neural networks: proceedings of the 1992
               International Conference on Artificial Neural Networks
               ({ICANN}-92), Brighton, United Kingdom},
  year      = 1992,
  publisher = {North-Holland},
  address   = {Amsterdam, Netherlands},
  month     = sep,
  abstract  = {Up to now the most detailed design methodology for
               ANN. It suggests a life cycle prototype development.}
}

%design methods

@inproceedings{AAT93,
  author    = {Alba, E. and Aldana, J. F. and Troya, J. M.},
  title     = {Full automatic {ANN} design: A genetic approach},
  editor    = {Jose Mira and Joan Cabestany and Alberto Prieto},
  pages     = {399--404},
  booktitle = {New trends in neural computing: International
               workshop on artificial neural networks ({IWANN} '93),
               Sitges, Spain},
  year      = 1993,
  publisher = {Springer-Verlag},
  address   = {New York},
  month     = jun,
  abstract  = {READ}
}
@article{And90,
  author   = {Anderson, James A.},
  title    = {Data representation in neural networks},
  journal  = {AI Expert},
  year     = 1990,
  volume   = 5,
  number   = 6,
  pages    = {30--41},
  month    = jun,
  abstract = {Investigates human learn and storage behavior and
              copies it to ANN. The bar representation
              (statevector coding) is suggested to be the most
              suitable for ANN because of similar constraints
              observed with humans.}
}
%not used yet
@inproceedings{BKHS93,
  author      = {Boers, E. J. W. and Kuiper, H. and Happel, B. L. M. and
                 Sprinkhuizen-Kuyper, I. G.},
  title       = {Designing modular artificial neural networks},
  booktitle   = {Proceedings of Computing Science in The Netherlands},
  editor      = {Wijshoff, H. A.},
  pages       = {87--96},
  year        = 1993,
  address     = {SION, Stichting Mathematisch Centrum},
  abstractURL = {http://www.wi.leidenuniv.nl/TechRep/tr93-24.html},
  documentURL = {file://ftp.wi.LeidenUniv.nl/pub/CS/TechnicalReports/1993/tr93-24.ps.gz}
}
@article{Cau91,
  author   = {Caudill, Maureen},
  title    = {Neural network training tips and techniques},
  journal  = {AI Expert},
  year     = 1991,
  volume   = 6,
  number   = 1,
  pages    = {56--61},
  month    = jan,
  abstract = {Backprop Feedforward training tips. 1) multiple
              random start 2) random shake 3) small nets <5layers
              4) use momentum 5) add noise to generalise better 6)
              more tolerance in error 7) enlarge net in middle
              layer not more than 10\% and try again.}
}
%not used yet
@phdthesis{Coz95,
  author      = {Cozzio, Rico},
  title       = {The design of neural networks using a priori
                 knowledge},
  school      = {Institute of Logic and Computer Science,
                 Dept. Mathematics, ETHZ, Zurich},
  year        = 1995,
  abstractURL = {ftp://ftp.inf.ethz.ch/doc/diss/th10991.abstract},
  documentURL = {ftp://ftp.inf.ethz.ch/doc/diss/th10991.ps}
}
%not used yet
@inproceedings{DrW93,
  author    = {Dreiseitl, Stephan and Wang, Dongming},
  title     = {Automatic Generation of {C++} code for neural network
               simulation},
  editor    = {Jose Mira and Joan Cabestany and Alberto Prieto},
  pages     = {358--363},
  booktitle = {New trends in neural computing: International
               workshop on artificial neural networks ({IWANN} '93),
               Sitges, Spain},
  year      = 1993,
  publisher = {Springer-Verlag},
  address   = {New York},
  month     = jun,
  abstract  = {A Maple like grammar is defined to describe
               backpropagation nets. Given a language in this
               grammar an algorithm generates C++ learning code.}
}
%not used yet
@article{BoG93,
  author   = {Bose, N. K. and Garga, A. K.},
  title    = {Neural network design using {Voronoi} diagrams},
  journal  = IEEE:TNN,
  year     = 1993,
  volume   = 4,
  number   = 5,
  pages    = {778--787},
  month    = sep,
  abstract = {Voronoi diagrams are computed to cover the input
              data. From this follows a defined architecture,
              weights, and thresholds for multilayer feedforward
              nets.}
}

@inproceedings{HaS93,
  author    = {Hassibi, Babak and Stork, David G.},
  title     = {Second order derivatives for network pruning:
               Optimal brain surgeon},
  editor    = {Stephen Jos{\'e} Hanson and Jack D. Cowan and C. Lee Giles},
  volume    = 5,
  pages     = {164--171},
  booktitle = {Advances in Neural Information Processing Systems},
  year      = 1993,
  publisher = {Morgan Kaufmann},
  address   = {San Mateo, CA},
  ISSN      = {1049-5258},
  ISBN      = {1-55860-274-7},
  abstract  = {Effective and practical approach to decrease the
               size of a trained feed forward network.}
}

@incollection{JoF90,
  author    = {Dan Jones and Stanley P.~Franklin},
  title     = {Choosing a network: Matching the architecture to the
               application},
  editor    = {Alianna Maren and Craig Harston and Robert Pap},
  booktitle = {Handbook of neural computing applications},
  chapter   = 14,
  pages     = {219--232},
  publisher = {Academic Press},
  address   = {San Diego, CA},
  year      = 1990,
  abstract  = {Relatively short (only stepwise) development
               method. Application perspective in question of
               finding a network paradigm.}
}

@book{Mas93,
  author    = {Timothy Masters},
  title     = {Practical neural network recipes in {C++}},
  publisher = {Academic Press},
  address   = {San Diego, CA},
  year      = 1993,
  abstract  = {Practical problem solution approach. Only
               Feedforward networks. Also genetic algorithm for
               weight adjustment.}
}
@article{NRU+94,
  author   = {Nerrand, O. and Roussel-Ragot, P. and Urbani, D. and
              Personnaz, L. and Dreyfus, G.},
  title    = {Training recurrent neural networks: {W}hy and how?
              {A}n illustration in dynamical process modeling},
  journal  = IEEE:TNN,
  year     = 1994,
  volume   = 5,
  number   = 2,
  pages    = {178--184},
  month    = mar,
  abstract = {General approach for the training of recurrent
              neural networks by gradient-based
              algorithm. Development of four families of
              algorithms. The critical choice of the training
              algorithm is illustrated by a selection scheme for
              process modeling.}
}

%Number of hidden neurons and other informal rule of thumbs
@misc{Sar95,
  author       = {Sarle, Warren S.},
  title        = {On computing number of neurons in hidden layer\dots},
  howpublished = {newsgroup: comp.ai.neural-nets},
  year         = 1995,
  month        = feb,
  note         = {SAS Institute Inc., Cary, NC 27513; saswss@unx.sas.com},
  abstract     = {Dependencies for number of nodes in hidden layer.}
}


@article{GuK91,
  author   = {Guiver, John P. and Klimasauskas, Casimir C.},
  title    = {Applying Neural Networks, Part {IV}: Improving Performance},
  journal  = {PC AI},
  year     = 1991,
  month    = {July/August},
  abstract = {get it}
}

% not used yet:
@article{Bar93,
  author  = {Barron, A. R.},
  title   = {Universal Approximation Bounds for Superpositions of
             a Sigmoidal Function},
  journal = {IEEE Transactions on Information Theory},
  year    = {1993},
  volume  = {39},
  number  = {3},
  pages   = {930--945},
  month   = may
}
@article{Cro92,
  author   = {Crooks, Ted},
  title    = {Care and feeding of neural networks},
  journal  = {AI Expert},
  year     = 1992,
  pages    = {37--41},
  month    = jul,
  abstract = {Recommends strongly data analysis and network preparation.}
}
% not used yet:
@techreport{VGK93,
  author      = {Vysniauskas, V. and Groen, F. C. A. and Kr{\"o}se, B. J. A.},
  title       = {The optimal number of learning samples and hidden
                 units in function approximation with a feedforward network},
  institution = {Dept. of Comp. Sys, Univ. of Amsterdam},
  year        = 1993,
  number      = {CS-93-15},
  month       = nov,
  abstract    = {This paper presents a method to estimate the optimal
                 number of learning samples and the number of hidden
                 units for a function approximation by a feedforward
                 network.  The optimality is considered under the
                 minimal learning time constraint for a given degree
                 of accuracy which is an essential point for
                 real-time learning. The approximation error is
                 modeled as a function of the number of hidden units
                 and the number of learning samples. Two models are
                 presented: the first one is based on general bounds
                 of approximation and the second one on an asymptotic
                 expansion of the approximation error. This approach
                 was applied to optimize the learning of the
                 camera-robot mapping of a visually guided robot arm
                 and a complex logarithm function approximation. The
                 results of this investigation suggested that the
                 actual approximation errors differ considerably from
                 the theoretical upper bounds.}
}

%ANN general

@article{BlR92,
  author  = {Blum, Avrim L. and Rivest, Ronald L.},
  title   = {Training a 3-node neural network is {NP}-complete},
  journal = {Neural Networks},
  year    = 1992,
  volume  = 5,
  number  = 1,
  pages   = {117--127}
}

@book{CaB90,
  author    = {Maureen Caudill and Charles Butler},
  title     = {Naturally intelligent systems},
  publisher = {MIT Press},
  address   = {Cambridge, MA},
  year      = 1990,
  abstract  = {READ}
}
%not used yet
@article{Fie94,
  author  = {Fiesler, E.},
  title   = {Neural network classification and formalization},
  journal = {Computer Standards and Interfaces},
  year    = 1994,
  volume  = 16,
  number  = 3,
  pages   = {231--239}
}
@book{Hec91,
  author    = {Hecht-Nielsen, Robert},
  title     = {Neurocomputing},
  publisher = AW,
  year      = 1991,
  address   = AWadr,
  abstract  = {Appendix dealing with project management.}
}
@techreport{Heg93,
  author      = {Hegt, J. A.},
  title       = {Hardware implementations of neural networks},
  institution = {Eindhoven University of Technology},
  year        = 1993,
  month       = nov,
  abstractURL = {file://ftp.urc.tue.nl/pub/neural/hardware_general.abstract},
  documentURL = {file://ftp.urc.tue.nl/pub/neural/hardware_general.ps.gz}
}
@book{Hry92,
  author    = {Thomas Hrycej},
  title     = {Modular learning in neural networks: A modularized
               approach to neural network classification},
  series    = {Sixth-generation computer technology series},
  publisher = {John Wiley \& Sons},
  address   = {New York},
  year      = 1992,
  abstract  = {READ}
}
@unpublished{IEEE95,
  author = {{Technical committees of the IEEE Societies} and {the
            Standards Coordinating Committees of the IEEE
            Standards Board}},
  title  = {{IEEE} recommended definition of terms for artificial
            neural networks},
  key    = {IEEE95},
  note   = {Draft and not yet approved},
  year   = 1995,
  month  = feb
}
% not used yet
@techreport{JonesMJ92a,
  author       = {Jones, M. J.},
  title        = {Using recurrent networks for dimensionality
                  reduction},
  institution  = {MIT},
  year         = 1992,
  month        = sep,
  number       = {AITR-1396},
  documentSize = {604.9 kbytes},
  documentURL  = {ftp://publications.ai.mit.edu/ai-publications/1992/AITR-1396.ps.Z}
}
@article{Kos87,
  author  = {Kosko, B.},
  title   = {Adaptive bidirectional associative memories},
  journal = {Applied Optics},
  year    = 1987,
  volume  = 26,
  number  = 23,
  pages   = {4947--4960},
  month   = dec
}
@article{Lip87,
  author   = {Lippmann, R. P.},
  title    = {An introduction to computing with neural nets},
  journal  = {Acoustics, Speech, and Signal Processing Magazine},
  year     = 1987,
  volume   = 4,
  number   = 2,
  pages    = {4--22},
  month    = apr,
  abstract = {One of the standard introductional references.}
}

@book{MeW92,
  author    = {Mehra, Pankaj and Wah, Benjamin W.},
  title     = {Artificial Neural Networks: {C}oncepts and theory},
  publisher = IEEE:CSP,
  year      = 1992,
  address   = IEEE:CSPadr,
  abstract  = {A selection of papers which covers the ANN
               theory.}
}
@misc{Pre,
  author       = {Prechelt, Lutz},
  title        = {{Frequently Asked Questions} ({FAQ}) in ``All
                  aspects of neural networks''},
  howpublished = {newsgroup: comp.ai.neural-nets},
  note         = {Posted monthly},
  abstract     = {More than an introduction. Here one finds references
                  for ANN research and applications.}
}
@book{Sch92,
  author    = {Schalkoff, Robert J.},
  title     = {Pattern recognition: Statistical, structural and
               neural approaches},
  publisher = {John Wiley \& Sons},
  year      = 1992,
  address   = {New York},
  abstract  = {Engineers approach for pattern recognition. Compares
               statistical, structural, and ANN paradigms for problem
               solving. Gives only little guidance on how to
               develop an ANN.}
}

@book{Sim90,
  author    = {Simpson, Patrick K.},
  title     = {Artificial neural systems},
  publisher = {Pergamon Press},
  year      = 1990,
  series    = {Neural networks: Research and applications},
  address   = {Elmsford, NY},
  ISBN      = {0-08-037895-1},
  abstract  = {READ}
}
@book{Sou+91,
  author    = {Branko Sou\v{c}ek and {the IRIS Group}},
  title     = {Neural and intelligent systems integration},
  series    = {Sixth-generation computer technology series},
  publisher = {John Wiley \& Sons},
  address   = {New York},
  year      = 1991,
  abstract  = {READ}
}
@book{Zur92,
  author    = {Zurada, Jacek M.},
  title     = {Introduction to artificial neural systems},
  publisher = {West Publishing},
  year      = 1992,
  address   = {St.~Paul, MN},
  abstract  = {A deep introduction to ANN. Especially suitable for
               university courses. Many examples and
               exercises. Doesn't cover all network types. While I
               was using the book I was faced with several
               errors. Maybe it is worth to wait for the next edition.}
}

%system indentification
@inproceedings{LjS92,
  author       = {Lennart Ljung and J. Sj{\"o}berg},
  title        = {A system identification perspective on neural nets},
  year         = 1992,
  editor       = {S. Y. Kung and F. Fallside and J. Ha. Sorenson and
                  C. A. Kamm},
  series       = {Proceedings of the 1992 IEEE Workshop},
  pages        = {423--435},
  booktitle    = {Neural Networks for Signal Processing II},
  organization = {IEEE Signal Processing Society Neural Networks
                  Technical Committee},
  publisher    = {Danish Computational Neural Network Center
                  (CONNECT), Technical University of Denmark, IEEE
                  Signal Processing, IEEE Neural Networks Council},
  month        = may,
  abstract     = {System Identification: Model structures of ANN is
                  only a new structure of system identification.}
}

@book{Lju87,
  author    = {Ljung, Lennart},
  title     = {System identification: Theory for the user},
  publisher = PH,
  year      = 1987,
  series    = {Prentice-Hall information and system sciences series},
  address   = PHadr
}

%other
@inproceedings{Hae77,
  author       = {Wolfgang H{\"a}ndler},
  title        = {The impact of classification schemes on computer
                  architecture},
  booktitle    = {Proceedings of the 1977 International Conference on
                  Parallel Processing},
  pages        = {7--15},
  organization = {IEEE},
  year         = 1977,
  abstract     = {The influence of theoretical aspects, schemes on the
                  implementation of computer architecture}
}


