module Moons-2-ReLU-10-ReLU-10-Softmax-2 where
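
-- A dense classifier with architecture 2 → 10 → 10 → 2 (ReLU, ReLU,
-- softmax), as spelled out by the module name; "Moons" suggests the
-- weights were trained on the two-moons dataset.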

open import Data.Float as Float using (Float)
open import Data.List as List using (List; []; _∷_)
open import Data.Vec as Vec using (Vec; []; _∷_)
open import Amethyst.Network
open import Amethyst.LinearAlgebra.As.Schmitty

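-- layer0: affine map from the 2 inputs to 10 hidden units, followed by ReLU.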
layer0 : Layer Float 2 10
layer0 = record
  { weights    = (0.569390952587128 ∷ 0.00407964549958706 ∷ 0.520559012889862 ∷ 0.0512562803924084 ∷ 0.546230375766754 ∷ -0.591473519802094 ∷ -0.206871315836906 ∷ 0.40884655714035 ∷ -0.540547728538513 ∷ -0.637664973735809 ∷ [])
               ∷ (0.109549351036549 ∷ 0.77337920665741 ∷ -0.0697582066059113 ∷ 0.584739863872528 ∷ -0.627979218959808 ∷ 0.335013180971146 ∷ 0.539017379283905 ∷ 0.854615986347198 ∷ -0.263842165470123 ∷ -0.00856650620698929 ∷ [])
               ∷ []
  ; biases     = 0.1730867177248 ∷ 0.133806750178337 ∷ 0.202945545315742 ∷ 0.0830471590161324 ∷ 0.158191606402397 ∷ -0.00530562316998839 ∷ 0.141202166676521 ∷ 0.0456042140722275 ∷ -0.0250585116446018 ∷ 0.00837907567620277 ∷ []
  ; activation = Activation.relu
  }

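-- layer1: affine map from 10 to 10 hidden units, followed by ReLU.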
layer1 : Layer Float 10 10
layer1 = record
  { weights    = (-0.136047258973122 ∷ 0.275619089603424 ∷ 0.183129414916039 ∷ -0.271167546510696 ∷ -0.0621173679828644 ∷ 0.114380948245525 ∷ 0.0963571146130562 ∷ 0.418078809976578 ∷ 0.256011784076691 ∷ -0.213664948940277 ∷ [])
               ∷ (0.0256014186888933 ∷ -0.719946086406708 ∷ 0.245413392782211 ∷ -0.110843047499657 ∷ 0.217050150036812 ∷ 0.367228209972382 ∷ -0.0518544130027294 ∷ -0.149911731481552 ∷ 0.119882583618164 ∷ 0.159078389406204 ∷ [])
               ∷ (-0.569350838661194 ∷ 0.573630094528198 ∷ -0.313661932945251 ∷ -0.755025088787079 ∷ 0.225708708167076 ∷ 0.275034934282303 ∷ -0.348928421735764 ∷ 0.0422560349106789 ∷ -0.253630548715591 ∷ 0.396881073713303 ∷ [])
               ∷ (-0.197024494409561 ∷ -0.54819130897522 ∷ 0.543577253818512 ∷ -0.158019036054611 ∷ -0.28152322769165 ∷ -0.282698392868042 ∷ 0.138884365558624 ∷ -0.582330048084259 ∷ 0.0502770021557808 ∷ 0.0883368775248528 ∷ [])
               ∷ (-0.110343217849731 ∷ 0.297101587057114 ∷ 0.131345525383949 ∷ -0.212669715285301 ∷ 0.684113442897797 ∷ -0.362706750631332 ∷ -0.226534947752953 ∷ 0.412587732076645 ∷ 0.241236954927444 ∷ 0.168373093008995 ∷ [])
               ∷ (0.457192093133926 ∷ 0.393099009990692 ∷ 0.129057720303535 ∷ 0.441554576158524 ∷ -0.144818633794785 ∷ 0.465180724859238 ∷ 0.394037961959839 ∷ -0.406313449144363 ∷ 0.237114012241364 ∷ -0.271066009998322 ∷ [])
               ∷ (-0.0182914827018976 ∷ -0.375924915075302 ∷ 0.44120791554451 ∷ 0.166606575250626 ∷ 0.342226058244705 ∷ 0.613215446472168 ∷ -0.16384644806385 ∷ 0.343180328607559 ∷ -0.00335896364413202 ∷ 0.0832106024026871 ∷ [])
               ∷ (0.17581282556057 ∷ 0.0369631871581078 ∷ 0.171982541680336 ∷ -0.0114386519417167 ∷ -0.280146867036819 ∷ 0.629503607749939 ∷ 0.659626245498657 ∷ 0.241863638162613 ∷ 0.159142091870308 ∷ -0.41048139333725 ∷ [])
               ∷ (-0.52195131778717 ∷ -0.451420903205872 ∷ -0.191146865487099 ∷ 0.121404729783535 ∷ -0.302287101745605 ∷ 0.24384780228138 ∷ -0.0270811971276999 ∷ -0.0305070951581001 ∷ -0.0943672060966492 ∷ 0.272052317857742 ∷ [])
               ∷ (0.322369903326035 ∷ -0.029862254858017 ∷ 0.00615040538832545 ∷ 0.445162922143936 ∷ -0.0252297818660736 ∷ 0.458946466445923 ∷ 0.709194540977478 ∷ 0.0741148963570595 ∷ -0.397431403398514 ∷ -0.460754334926605 ∷ [])
               ∷ []
  ; biases     = -0.0625878497958183 ∷ 0.140043646097183 ∷ -0.0265906099230051 ∷ 0.15484519302845 ∷ 0.0700908452272415 ∷ 0.0243934523314238 ∷ 0.164417609572411 ∷ 0.165570825338364 ∷ 0.038091916590929 ∷ 0.240999609231949 ∷ []
  ; activation = Activation.relu
  }

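-- layer2: affine map from 10 hidden units to the 2 output classes, followed by softmax.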
layer2 : Layer Float 10 2
layer2 = record
  { weights    = (0.106655806303024 ∷ 0.04444370418787 ∷ [])
               ∷ (-0.880266308784485 ∷ 0.243414700031281 ∷ [])
               ∷ (0.507353961467743 ∷ -0.430569797754288 ∷ [])
               ∷ (0.800903141498566 ∷ 0.0122971683740616 ∷ [])
               ∷ (-0.0907366797327995 ∷ 0.677822589874268 ∷ [])
               ∷ (0.404959321022034 ∷ -0.641630291938782 ∷ [])
               ∷ (0.468036532402039 ∷ -0.50335556268692 ∷ [])
               ∷ (-0.522089660167694 ∷ 0.252658665180206 ∷ [])
               ∷ (0.205440521240234 ∷ 0.270680963993073 ∷ [])
               ∷ (-0.0547852478921413 ∷ 0.839694499969482 ∷ [])
               ∷ []
  ; biases     = -0.0451310239732265 ∷ 0.045131016522646 ∷ []
  ; activation = Activation.softmax
  }

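-- model: the three layers composed in order; the index (2 ∷ 10 ∷ 10 ∷ 2 ∷ [])
-- records the layer widths from input to output.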
model : Network Float (2 ∷ 10 ∷ 10 ∷ 2 ∷ [])
model = layer0 ∷ layer1 ∷ layer2 ∷ []
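
-- A minimal usage sketch, kept as a comment. Assumption: the module and
-- evaluator named below (`Amethyst.Network.As.Float`, `evalNetwork`) are
-- illustrative and not confirmed by this file; adjust to Amethyst's actual
-- evaluation API.
--
--   open import Amethyst.Network.As.Float using (evalNetwork)
--
--   classify : Vec Float 2 → Vec Float 2
--   classify = evalNetwork model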