NeuralNetwork.hs
Here is the Haskell code that I wrote to build a neural network. Next I will train it to play tic-tac-toe.
Here starts Neuron.hs
module NeuralNetwork.Neuron where
import Data.Binary
import Data.Word (Word8)
import qualified Data.Sequence as Seq
data Node = Neuron { bias :: Double, priors :: [Int], weights :: [Double], hidden :: Bool }
          | Input { value :: Double }
          deriving (Show, Eq)
instance Binary Node where
  put (Neuron {bias = bias, priors = priors, weights = weights, hidden = hidden}) =
    do put (0 :: Word8); put bias; put priors; put weights; put hidden
  put (Input {value = value}) = do put (1 :: Word8); put value
  get = do
    t <- get :: Get Word8
    case t of
      0 -> do
        b <- get; p <- get; w <- get; h <- get
        return Neuron {bias = b, priors = p, weights = w, hidden = h}
      1 -> do
        value <- get
        return Input {value = value}
      _ -> fail "NeuralNetwork.Neuron: unknown tag"
-- Nudge each weight by error * input and the bias by error; Input nodes are returned unchanged.
adjustWeights :: [Double] -> Double -> Node -> Node
adjustWeights inputs error (Neuron b p w h) =
  let w' = zipWith (+) (fmap (*error) inputs) w
      b' = b + error
  in Neuron b' p w' h
adjustWeights _ _ n = n
result :: Seq.Seq Double -> Node -> Double
result _ (Input value) = value
result values node = transferFunction $ rawResult values node
rawResult :: Seq.Seq Double -> Node -> Double
rawResult values (Neuron b p w _) = b + sum (zipWith (*) (fmap (Seq.index values) p) w)
rawResult _ (Input value) = value
transferFunction :: (Floating a) => a -> a
transferFunction x = 1/(1+exp(-x))
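To show how a single node behaves, here is a small throwaway sketch I might run against this module (the numbers are made up for illustration; only the module and its functions come from the code above):

import qualified Data.Sequence as Seq
import NeuralNetwork.Neuron

demo :: IO ()
demo = do
  let priorOutputs = Seq.fromList [1.0, 0.5]  -- outputs already computed for nodes 0 and 1
      n = Neuron { bias = 0.1, priors = [0, 1], weights = [0.4, -0.2], hidden = False }
  print (rawResult priorOutputs n)        -- 0.1 + 1.0 * 0.4 + 0.5 * (-0.2) = 0.4
  print (result priorOutputs n)           -- sigmoid of 0.4, roughly 0.5987
  print (adjustWeights [1.0, 0.5] 0.1 n)  -- weights become roughly [0.5, -0.15], the bias 0.2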
Here starts NeuralNetwork.hs
module NeuralNetwork.NeuralNetwork
  (NeuralNetwork,
   replaceInputNodes,
   newInputNodes,
   generateVariant,
   results,
   makeInputs,
   emptyNetwork,
   randomNetwork,
   makeNetwork) where
import qualified Data.Foldable as Foldable
import qualified Data.Sequence as Seq
import NeuralNetwork.Neuron
import Data.Binary
import System.Random
data NeuralNetwork = NeuralNetwork { nodes :: [Node] } deriving (Show, Eq)
instance Binary NeuralNetwork where
  put NeuralNetwork {nodes = nodes} = do put nodes
  get = do nodes <- get; return NeuralNetwork {nodes = nodes}
inputNodes :: NeuralNetwork -> [Node]
inputNodes NeuralNetwork {nodes = nodes} = [v | v@Input {} <- nodes]
outputNodes :: NeuralNetwork -> [Node]
outputNodes NeuralNetwork {nodes = nodes} =
  [x | x@(Neuron {}) <- nodes, not (hidden x)]
replaceInputNodes :: NeuralNetwork -> [Node] -> NeuralNetwork
replaceInputNodes NeuralNetwork {nodes = nodes} newI =
  NeuralNetwork $ newI ++ drop (length newI) nodes
newInputNodes :: NeuralNetwork -> [Double] -> NeuralNetwork
newInputNodes n values = replaceInputNodes n $ makeInputs (length values) values
-- With probability 1/chance per node, add uniform noise drawn from (min, max) to that node's weights.
generateVariant :: (RandomGen g) => (Int, Double, Double, g) -> NeuralNetwork -> NeuralNetwork
generateVariant (chance, min, max, g) NeuralNetwork {nodes = nodes} =
  let nLength = length nodes
      doIts = take nLength $ randomRs (1, chance) g
      rndds = drop nLength $ randomRs (min, max) g
      rnds = (doIts, rndds)
  in NeuralNetwork $ varyNodes rnds nodes
varyNodes :: ([Int], [Double]) -> [Node] -> [Node]
varyNodes _ [] = []
varyNodes (1 : ds, rs) (Neuron b p w h : xs) =
  let w' = zipWith (+) w rs
      rs' = drop (length w) rs
  in Neuron b p w' h : varyNodes (ds, rs') xs
varyNodes (_ : ds, rs) (x : xs) = x : varyNodes (ds, rs) xs
propagateErrors :: [Double] -> NeuralNetwork -> [Double]
propagateErrors expected network@(NeuralNetwork nodes) =
  let actual = results network
      outputDeltas = zipWith (-) expected actual
      missingDeltas = length nodes - length outputDeltas
      deltas = take missingDeltas (repeat 0) ++ outputDeltas
      nds = zip nodes deltas
  in foldErrors [] nds
foldErrors :: [Double] -> [(Node, Double)] -> [Double]
foldErrors es [] = es
foldErrors es ns =
  let (n, e) = last ns
      p = case n of
            Neuron _ p _ _ -> p
            _ -> []
      w = case n of
            Neuron _ _ w _ -> w
            _ -> []
      seq = Seq.fromList (init ns)
      pws = zip p w
      mrs = munge seq pws e
      ns' = Foldable.toList mrs
  in foldErrors (e : es) ns'
munge :: Seq.Seq (Node, Double) -> [(Int, Double)] -> Double -> Seq.Seq (Node, Double)
munge ns [] _ = ns
munge ns ((p, w) : pws) e =
  let n = Seq.index ns p
      n' = (fst n, snd n + e * w)
      ns' = Seq.update p n' ns
  in munge ns' pws e
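-- Taken together, propagateErrors, foldErrors and munge walk the node list from
-- the back (the output layer) towards the front: each node's accumulated delta
-- is distributed to its prior nodes, scaled by the connecting weight. That is
-- the usual backpropagation-of-error step; the sigmoid derivative is applied
-- later, in adjustAllWeights.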
errorCorrect :: [Double] -> Double -> NeuralNetwork -> NeuralNetwork
errorCorrect expected learnRate n =
  let errors = propagateErrors expected n
      outputs = allResults n
  in NeuralNetwork . reverse $ adjustAllWeights (nodes n) errors outputs learnRate
adjustAllWeights :: [Node] -> [Double] -> [Double] -> Double -> [Node]
adjustAllWeights [] _ _ _ = []
adjustAllWeights nodes es os learnRate =
  let error = last es
      output = last os
      n = last nodes
      p = case n of
            Neuron _ p _ _ -> p
            _ -> []
      inputs = getItems (Seq.fromList os) p
      derivative = output * (1 - output)
      errorMash = error * derivative * learnRate
      n' = adjustWeights inputs errorMash n
  in (n' : adjustAllWeights (init nodes) (init es) (init os) learnRate)
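-- The weight update is the delta rule: each weight changes by
-- learnRate * delta * output * (1 - output) * input, where output * (1 - output)
-- is the derivative of the sigmoid transfer function at this node's output.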
getItems :: Seq.Seq a -> [Int] -> [a]
getItems s [] = []
getItems s (x:xs) = Seq.index s x : getItems s xs
allResults :: NeuralNetwork -> [Double]
allResults NeuralNetwork {nodes = n} =
  let (all, results) =
        foldl resultFold (Seq.fromList [], Seq.fromList []) n
  in Foldable.toList all
results :: NeuralNetwork -> [Double]
results NeuralNetwork {nodes = n} =
  let (all, results) =
        foldl resultFold (Seq.fromList [], Seq.fromList []) n
  in Foldable.toList results
resultFold :: (Seq.Seq Double, Seq.Seq Double) -> Node -> (Seq.Seq Double, Seq.Seq Double)
resultFold (all, outs) x@(Neuron b p w False) =
  let r = result all x
  in (all Seq.|> r, outs Seq.|> r)
resultFold (all, outs) x =
  let r = result all x
  in (all Seq.|> r, outs)
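-- The forward pass assumes the node list is in topological order: inputs first,
-- then hidden neurons, then output neurons, with every neuron's priors pointing
-- at earlier positions in the list (which is how makeNetwork lays them out).
-- resultFold threads a Seq of every node's output, so later neurons can index
-- into it, and separately collects the non-hidden neurons' outputs as the
-- network's results.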
makeInputs :: Int -> [Double] -> [Node]
makeInputs n xs = foldr (\x acc -> Input x : acc) [] (take n xs)
makeHiddens :: [Double] -> [Int] -> Int -> Int -> ([Node], Int, Int, [Double])
makeHiddens r [] s l = ([], s, l, r)
makeHiddens rnd (x:xs) start length =
  let (list, lStart, lLength, rndOut) =
        makeHiddens (drop (length * x) rnd) xs (start + length) x
  in (makeNeurons x rnd start length True ++ list, lStart, lLength, rndOut)
makeNeurons :: Int -> [Double] -> Int -> Int -> Bool -> [Node]
makeNeurons 0 _ _ _ _ = []
makeNeurons n gen start length hidden =
  Neuron 1 [start..start+length-1] (take length gen) hidden :
  makeNeurons (n - 1) (drop length gen) start length hidden
emptyNetwork :: [Int] -> NeuralNetwork
emptyNetwork layerLengths = makeNetwork (repeat 1) layerLengths
randomNetwork :: (RandomGen g) => (Double, Double, g) -> [Int] -> NeuralNetwork
randomNetwork (min, max, g) layerLengths =
  makeNetwork (randomRs (min, max) g) layerLengths
-- Build a network from a stream of weights and a list of layer sizes: the first
-- entry is the input count, the last the output count, and anything in between
-- is a hidden layer. The output layer draws its weights from the part of the
-- stream left over after the hidden layers.
makeNetwork :: [Double] -> [Int] -> NeuralNetwork
makeNetwork gen layerLengths =
  let inputs = head layerLengths
      (hiddens, start, length, genOut) =
        makeHiddens gen (tail (init layerLengths)) 0 inputs
  in NeuralNetwork (makeInputs inputs (repeat 1) ++
                    hiddens ++
                    makeNeurons (last layerLengths) genOut start length False)
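As a quick check that the pieces fit together, here is the kind of throwaway sketch I would run against this module (the layer sizes, weight range and mutation settings are just example values):

import NeuralNetwork.NeuralNetwork
import System.Random

demo :: IO ()
demo = do
  g <- getStdGen
  -- 2 inputs, one hidden layer of 3 neurons, 1 output; weights drawn from (-1, 1)
  let net = randomNetwork (-1, 1, g) [2, 3, 1]
  -- run the inputs 0.0 and 1.0 through the network
  print (results (newInputNodes net [0.0, 1.0]))
  -- a mutated copy: each node has a 1 in 10 chance of having noise from
  -- (-0.1, 0.1) added to its weights
  let net' = generateVariant (10, -0.1, 0.1, g) net
  print (results (newInputNodes net' [0.0, 1.0]))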
The following program lets you make a neural network: it builds a random network from the command-line arguments and writes it, gzip-compressed, to a file.
import System.Environment
import Codec.Compression.GZip
import NeuralNetwork.NeuralNetwork
import qualified Data.ByteString.Lazy.Char8 as C
import Data.Binary
import System.Random
import System.Exit
main = do
  args <- getArgs -- [String]
  progName <- getProgName -- String
  handleInputs (length args) args progName
handleInputs :: Int -> [String] -> String -> IO ()
handleInputs count inputs prog
  | count < 5 = do
      putStrLn $ "Usage " ++ prog ++ " outFileName minWeight maxWeight inputCount [hiddenCount] outputCount"
      exitWith (ExitFailure 1)
  | otherwise = do
      gen <- getStdGen -- Random generator
      let fileName = inputs !! 0
      let min = read $ inputs !! 1 :: Double
      let max = read $ inputs !! 2 :: Double
      let nodes = fmap (\x -> read x :: Int) (drop 3 inputs)
      let n = randomNetwork (min, max, gen) nodes
      let t = compress (encode n)
      C.writeFile fileName t
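For example, assuming the program is compiled to a binary called makeNetwork (the binary name and the file name ticTacToe.net are just examples), a nine-input, nine-output network with one hidden layer of nine neurons and weights between -1 and 1 could be written with: makeNetwork ticTacToe.net -1 1 9 9 9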
The following program loads a saved network and evaluates it on input read from standard input:
import System.Environment
import Codec.Compression.GZip
import NeuralNetwork.NeuralNetwork
import qualified Data.ByteString.Lazy.Char8 as C
import Data.Binary
import System.IO
import System.Exit
import Control.Monad
main = do
  hSetBuffering stdin NoBuffering
  hSetBuffering stdout NoBuffering
  args <- getArgs -- [String]
  progName <- getProgName -- String
  handleInputs (length args) args progName
handleInputs :: Int -> [String] -> String -> IO ()
handleInputs count inputs prog
  | count < 1 = do
      putStrLn $ "Usage " ++ prog ++ " inFileName"
      exitWith (ExitFailure 1)
  | otherwise = do
      let fileName = inputs !! 0
      f <- C.readFile fileName
      let n = decode (decompress f) :: NeuralNetwork
      forever $ do
        i <- getLine
        let values = fmap (\x -> read x :: Double) $ words i
        let n' = newInputNodes n values
        putStrLn . unwords $ fmap (\x -> show x) (results n')
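Assuming the nine-input network from the example above was saved to ticTacToe.net, running this program with that file name lets you type nine space-separated numbers on a line (one per board square) and get the network's output values back as a single line; it keeps reading lines until you interrupt it.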