# by Claude Heiland-Allen 2022
# see <https://mathr.co.uk/blog/2022-06-24_counting_artificial_neural_networks.html>
# don't use parallelism unless you have 20 GB of RAM per job
# test size can be set on the command line with "make test limit=6"

# test size; override on the command line with "make test limit=6"
limit := 7

# these targets are commands, not files — without .PHONY a stray file named
# e.g. "test" or "clean" would silently disable the target
.PHONY: all clean test data plots

# remove a half-written target when its recipe fails, so an interrupted
# "make data" never leaves a truncated .txt file that looks up to date
.DELETE_ON_ERROR:

all: exhaustive pruning memoizing

# remove build products (keeps the expensive .txt data and .png plots);
# $(RM) expands to "rm -f", so missing files are not an error
clean:
	$(RM) exhaustive exhaustive.hi exhaustive.o
	$(RM) pruning
	$(RM) memoizing memoizing.hi memoizing.o

# smoke-test all three implementations against each other:
# each program is run as "./prog <limit> <bias>" for bias = 0 (unbiased)
# and bias = 1 (biased); `time` reports wall-clock cost per run
# takes over 15 minutes wall-clock using 20 GB peak RAM on my machine with limit = 8
test: exhaustive pruning memoizing
	time ./exhaustive $(limit) 0
	time ./pruning $(limit) 0
	time ./memoizing $(limit) 0
	time ./exhaustive $(limit) 1
	time ./pruning $(limit) 1
	time ./memoizing $(limit) 1

# takes about 15 minutes wall-clock using 21 GB peak RAM on my machine
data: unbiased.txt biased.txt

# write through a temporary file and rename, so an interrupted run never
# leaves a truncated-but-newer output that make would treat as up to date;
# `time` reports to stderr, so it is unaffected by the stdout redirect
unbiased.txt: memoizing
	time ./memoizing 100 0 > $@.tmp && mv -f $@.tmp $@

biased.txt: memoizing
	time ./memoizing 100 1 > $@.tmp && mv -f $@.tmp $@

# $< = exhaustive.hs, $@ = exhaustive; -o pins the output name explicitly
# instead of relying on GHC's default (source name minus extension)
exhaustive: exhaustive.hs
	ghc -O2 -Wall -o $@ $<

# $< = pruning.c, $@ = pruning; BITS selects the word size used by the C code
pruning: pruning.c
	gcc -O3 -fopenmp -std=c99 -Wall -Wextra -pedantic -DBITS=32 -o $@ $<

# $< = memoizing.hs, $@ = memoizing; -o pins the output name explicitly
# instead of relying on GHC's default (source name minus extension)
memoizing: memoizing.hs
	ghc -O2 -Wall -o $@ $<

plots: unbiased.png biased.png

# one gnuplot run produces BOTH PNGs, but "a b: prereqs" declares two
# independent rules, so the recipe would run twice (and race under -j).
# Route both outputs through a single stamp file instead — the portable
# multiple-outputs idiom (grouped targets "&:" need GNU make >= 4.3).
unbiased.png biased.png: .plots.stamp ;
.plots.stamp: plots.gnuplot unbiased.txt biased.txt
	gnuplot < plots.gnuplot
	@touch $@
