#LyX 2.3 created this file. For more info see http://www.lyx.org/ \lyxformat 544 \begin_document \begin_header \save_transient_properties true \origin unavailable \textclass article \begin_preamble \def\changemargin#1#2{\list{}{\rightmargin#2\leftmargin#1}\item[]} \let\endchangemargin=\endlist \pagenumbering{gobble} \usepackage{pxfonts} \usepackage{color} \definecolor{commentgreen}{RGB}{0,94,11} \definecolor{darkblue}{rgb}{0,0,0.75} \definecolor{darkred}{rgb}{0.6,0,0} \end_preamble \use_default_options true \begin_modules customHeadersFooters minimalistic todonotes \end_modules \maintain_unincluded_children false \language british \language_package default \inputencoding utf8 \fontencoding global \font_roman "default" "default" \font_sans "default" "default" \font_typewriter "default" "default" \font_math "auto" "auto" \font_default_family default \use_non_tex_fonts false \font_sc false \font_osf false \font_sf_scale 100 100 \font_tt_scale 100 100 \use_microtype true \use_dash_ligatures true \graphics default \default_output_format default \output_sync 0 \bibtex_command biber \index_command default \paperfontsize default \spacing onehalf \use_hyperref true \pdf_title "Training Neural Networks With Backpropagation" \pdf_author "Andy Pack" \pdf_subject "EEEM005" \pdf_keywords "EEEM005" \pdf_bookmarks true \pdf_bookmarksnumbered false \pdf_bookmarksopen false \pdf_bookmarksopenlevel 1 \pdf_breaklinks false \pdf_pdfborder true \pdf_colorlinks false \pdf_backref false \pdf_pdfusetitle true \papersize default \use_geometry true \use_package amsmath 1 \use_package amssymb 1 \use_package cancel 1 \use_package esint 1 \use_package mathdots 1 \use_package mathtools 1 \use_package mhchem 1 \use_package stackrel 1 \use_package stmaryrd 1 \use_package undertilde 1 \cite_engine biblatex \cite_engine_type authoryear \biblio_style plain \biblio_options urldate=long \biblatex_bibstyle ieee \biblatex_citestyle ieee \use_bibtopic false \use_indices false \paperorientation portrait \suppress_date true \justification true \use_refstyle 1 \use_minted 0 \index Index \shortcut idx \color #008000 \end_index \leftmargin 1.8cm \topmargin 2cm \rightmargin 1.8cm \bottommargin 2cm \secnumdepth 3 \tocdepth 3 \paragraph_separation skip \defskip medskip \is_math_indent 0 \math_numbering_side default \quotes_style british \dynamic_quotes 0 \papercolumns 1 \papersides 1 \paperpagestyle fancy \listings_params "language=Python,breaklines=true,frame=tb,otherkeywords={self},emph={State},emphstyle={\ttb\color{darkred}},basicstyle={\ttfamily},commentstyle={\bfseries\color{commentgreen}\itshape},keywordstyle={\color{darkblue}},emphstyle={\color{red}},stringstyle={\color{red}}" \bullet 1 0 9 -1 \bullet 2 0 24 -1 \tracking_changes false \output_changes false \html_math_output 0 \html_css_as_file 0 \html_be_strict false \end_header \begin_body \begin_layout Title \size giant Training Neural Networks with Backpropagation \end_layout \begin_layout Author Andy Pack \end_layout \begin_layout Standard \begin_inset VSpace 15pheight% \end_inset \end_layout \begin_layout Standard \align center \begin_inset Graphics filename surrey.png lyxscale 15 width 40col% \end_inset \end_layout \begin_layout Standard \begin_inset VSpace vfill \end_inset \end_layout \begin_layout Standard \noindent \align center EEEM005 \begin_inset Newline newline \end_inset May 2021 \size large \begin_inset Newline newline \end_inset Department of Electrical and Electronic Engineering \begin_inset Newline newline \end_inset Faculty of Engineering and Physical Sciences 
\begin_inset Newline newline \end_inset University of Surrey \end_layout
\begin_layout Standard \begin_inset Newpage newpage \end_inset \end_layout
\begin_layout Section* Executive Summary \end_layout
\begin_layout Standard Summary here \end_layout
\begin_layout Standard \begin_inset Newpage newpage \end_inset \end_layout
\begin_layout Standard \begin_inset ERT status open \begin_layout Plain Layout \backslash pagenumbering{roman} \end_layout \end_inset \end_layout
\begin_layout Abstract abstract \end_layout
\begin_layout Standard \begin_inset CommandInset toc LatexCommand tableofcontents \end_inset \end_layout
\begin_layout Standard \begin_inset Newpage pagebreak \end_inset \end_layout
\begin_layout Standard \begin_inset FloatList figure \end_inset \end_layout
\begin_layout Standard \begin_inset FloatList table \end_inset \end_layout
\begin_layout Standard \begin_inset Newpage pagebreak \end_inset \end_layout
\begin_layout Right Footer Andy Pack / 6420013 \end_layout
\begin_layout Left Footer May 2021 \end_layout
\begin_layout Standard \begin_inset ERT status open \begin_layout Plain Layout \backslash pagenumbering{arabic} \end_layout \begin_layout Plain Layout \backslash setcounter{page}{1} \end_layout \end_inset \end_layout
\begin_layout Section Introduction \end_layout
\begin_layout Standard Artificial neural networks have been the subject of research and investigation since the 1940s with \noun on McCulloch \noun default and \noun on Pitts \noun default ' model of the artificial neuron \begin_inset CommandInset citation LatexCommand cite key "McCulloch1943" literal "false" \end_inset or \emph on Threshold Logic Unit \emph default . Over the following decades, the development of the single- and multi-layer perceptrons (SLP/MLP) alongside the backpropagation algorithm \begin_inset CommandInset citation LatexCommand cite key "Rumelhart1986" literal "false" \end_inset advanced the study of artificial intelligence. During the 2010s, convolutional neural networks proved critical in the fields of computer vision and image recognition \begin_inset CommandInset citation LatexCommand cite key "alexnet" literal "false" \end_inset . \end_layout
\begin_layout Standard This work investigates the ability of a shallow multi-layer perceptron to classify breast tumours as either benign or malignant. The network's architecture and training parameters were varied in order to evaluate how they affect classification performance. \end_layout
\begin_layout Standard Investigations were carried out in \noun on Python \noun default using the \noun on TensorFlow \noun default package to construct, train and evaluate neural networks. The networks were trained with supervised learning on labelled data taken from a standard \noun on Matlab \noun default dataset \begin_inset CommandInset citation LatexCommand cite key "matlab-dataset" literal "false" \end_inset from the \noun on Deep Learning Toolbox \noun default . \end_layout
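\begin_layout Standard To illustrate this workflow, the listing below gives a minimal sketch of how such a shallow MLP might be constructed, trained and evaluated with the \noun on TensorFlow \noun default Keras API. The layer sizes, activation function, optimiser and epoch count are illustrative placeholders rather than the parameters used in the experiments, and randomly generated data stands in for the breast cancer dataset. \end_layout
\begin_layout Standard
\begin_inset listings
inline false
status open
\begin_layout Plain Layout # Illustrative sketch only: placeholder data shapes, layer sizes and training settings \end_layout
\begin_layout Plain Layout import numpy as np \end_layout
\begin_layout Plain Layout import tensorflow as tf \end_layout
\begin_layout Plain Layout \end_layout
\begin_layout Plain Layout # Random stand-in for the labelled breast cancer data (nine features, two classes assumed) \end_layout
\begin_layout Plain Layout x_train = np.random.rand(600, 9) \end_layout
\begin_layout Plain Layout y_train = tf.keras.utils.to_categorical(np.random.randint(0, 2, 600), 2) \end_layout
\begin_layout Plain Layout x_test = np.random.rand(99, 9) \end_layout
\begin_layout Plain Layout y_test = tf.keras.utils.to_categorical(np.random.randint(0, 2, 99), 2) \end_layout
\begin_layout Plain Layout \end_layout
\begin_layout Plain Layout # Shallow MLP with a single hidden layer \end_layout
\begin_layout Plain Layout model = tf.keras.Sequential() \end_layout
\begin_layout Plain Layout model.add(tf.keras.layers.Dense(10, activation="sigmoid", input_shape=(9,))) \end_layout
\begin_layout Plain Layout model.add(tf.keras.layers.Dense(2, activation="softmax")) \end_layout
\begin_layout Plain Layout \end_layout
\begin_layout Plain Layout # Train, then evaluate on the held-out test set \end_layout
\begin_layout Plain Layout model.compile(optimizer="sgd", loss="categorical_crossentropy", metrics=["accuracy"]) \end_layout
\begin_layout Plain Layout model.fit(x_train, y_train, epochs=50, verbose=0) \end_layout
\begin_layout Plain Layout loss, accuracy = model.evaluate(x_test, y_test, verbose=0) \end_layout
\begin_layout Plain Layout print("test accuracy: %.3f" % accuracy) \end_layout
\end_inset
\end_layout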
\begin_layout Standard Section \begin_inset CommandInset ref LatexCommand ref reference "sec:exp1" plural "false" caps "false" noprefix "false" \end_inset investigates the effect on test accuracy of varying the number of hidden nodes and the number of epochs for which the MLPs are trained. Section \begin_inset CommandInset ref LatexCommand ref reference "sec:exp2" plural "false" caps "false" noprefix "false" \end_inset builds on this experiment, using reasonable parameter values to investigate the performance of an ensemble of models classifying in conjunction. The effect of varying the number of nodes and epochs across the ensemble was considered in order to determine whether combining multiple models could achieve better accuracy than any single model. Section \begin_inset CommandInset ref LatexCommand ref reference "sec:exp3" plural "false" caps "false" noprefix "false" \end_inset investigates the effect of altering how the networks learn by changing the optimisation algorithm. Two algorithms in addition to that used previously are considered and compared using the same test apparatus as section \begin_inset CommandInset ref LatexCommand ref reference "sec:exp2" plural "false" caps "false" noprefix "false" \end_inset . \end_layout
\begin_layout Section Hidden Nodes & Epochs (Exp 1) \begin_inset CommandInset label LatexCommand label name "sec:exp1" \end_inset \end_layout
\begin_layout Standard This section investigates the effect of varying the number of hidden nodes in a single hidden layer of a multi-layer perceptron. This is compared to the effect of varying the number of epochs for which the network is trained. \end_layout
\begin_layout Subsection Results \end_layout
\begin_layout Subsection Discussion \end_layout
\begin_layout Section Ensemble Classification (Exp 2) \begin_inset CommandInset label LatexCommand label name "sec:exp2" \end_inset \end_layout
\begin_layout Subsection Results \end_layout
\begin_layout Subsection Discussion \end_layout
\begin_layout Section Optimiser Comparisons (Exp 3) \begin_inset CommandInset label LatexCommand label name "sec:exp3" \end_inset \end_layout
\begin_layout Subsection Optimisers \end_layout
\begin_layout Subsubsection Stochastic Gradient Descent \end_layout
\begin_layout Subsubsection RMSprop \end_layout
\begin_layout Subsubsection Adam \end_layout
\begin_layout Subsection Results \end_layout
\begin_layout Subsection Discussion \end_layout
\begin_layout Section Overlapping 2D Gaussians (Exp 4) \end_layout
\begin_layout Section Conclusions \end_layout
\begin_layout Standard \begin_inset Newpage newpage \end_inset \end_layout
\begin_layout Standard \begin_inset CommandInset label LatexCommand label name "sec:bibliography" \end_inset \begin_inset CommandInset bibtex LatexCommand bibtex btprint "btPrintCited" bibfiles "references" options "bibtotoc" \end_inset \end_layout
\begin_layout Section \start_of_appendix Source Code \end_layout
\begin_layout Standard \begin_inset CommandInset include LatexCommand lstinputlisting filename "../nncw.py" lstparams "caption={Formatted Jupyter notebook containing experiment code},label={notebook-code}" \end_inset \end_layout
\end_body \end_document