@inproceedings{a9089caabf7849cbac48309b3ddde5ef,
title = "Differentiable Logics for Neural Network Training and Verification",
abstract = "Neural network (NN) verification is a problem that has drawn the attention of many researchers. The specific nature of neural networks does away with the conventional assumption that a static program is given for verification: in the case of NNs, multiple models can be used, and if one fails a new one can be trained, leading to an approach called continuous verification, referring to the loop between training and verification. One tactic for improving the network's performance is through {"}constraint-based loss functions{"} - a method of using differentiable logic (DL) to translate logical constraints into loss functions, which can then be used to train the network specifically to satisfy those constraints. In this paper we present a uniform way of defining a translation from logic syntax to a differentiable loss function, then examine and compare the existing DLs. We explore the mathematical properties desired in such translations and discuss the design space, identifying possible directions for future work.",
author = "Slusarz, Natalia and Komendantskaya, Ekaterina and Daggitt, {Matthew L.} and Stewart, Robert",
year = "2022",
doi = "10.1007/978-3-031-21222-2_5",
language = "English",
isbn = "978-3-031-21221-5",
volume = "13466",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Nature",
pages = "67--77",
editor = "Isac, O. and Ivanov, R. and Katz, G. and Narodytska, N. and Nenzi, L.",
booktitle = "Software Verification and Formal Methods for ML-Enabled Autonomous Systems",
address = "United States",
note = "5th International Workshop on Formal Methods for ML-Enabled Autonomous Systems (FoMLAS); Conference date: 31-07-2022 through 01-08-2022",
}