@inproceedings{Ferber2020network,
  author    = {Ferber, Patrick and Hoffmann, J{\"o}rg and Helmert, Malte},
  title     = {Neural Network Heuristics for Classical Planning: A Study of Hyperparameter Space},
  booktitle = {24th European Conference on Artificial Intelligence ({ECAI}'20)},
  year      = {2020},
  url       = {https://ecai2020.eu/papers/433_paper.pdf},
  abstract  = {Neural networks (NN) have been shown to be powerful state-value predictors in several complex games. Can similar successes be achieved in classical planning? Towards a systematic exploration of that question, we contribute a study of hyperparameter space in the most canonical setup: input = state, feed-forward NN, supervised learning, generalization only over initial state. We investigate a broad range of hyperparameters pertaining to NN design and training. We evaluate these techniques through their use as heuristic functions in Fast Downward. The results on IPC benchmarks show that highly competitive heuristics can be learned, yielding substantially smaller search spaces than standard techniques on some domains. But the heuristic functions are costly to evaluate, and the range of domains where useful heuristics are learned is limited. Our study provides the basis for further research improving on current weaknesses.},
}