@article{b2efc7a0a7104b619dc22084841a8297,
title = "SAIBERSOC: A Methodology and Tool for Experimenting with Security Operation Centers",
abstract = "In this article, we introduce SAIBERSOC (Synthetic Attack Injection to Benchmark and Evaluate the Performance of Security Operation Centers), a tool and methodology enabling security researchers and operators to evaluate the performance of deployed and operational Security Operation Centers (SOCs), or any other security monitoring infrastructure. The methodology relies on the MITRE ATT&CK Framework to define a procedure to generate and automatically inject synthetic attacks into an operational SOC to evaluate any output metric of interest (e.g., detection accuracy, time-to-investigation). To evaluate the effectiveness of the proposed methodology, we devise an experiment with n=124 students playing the role of SOC analysts. The experiment relies on a real SOC infrastructure and assigns students to either a BADSOC or a GOODSOC experimental condition. Our results show that the proposed methodology is effective in identifying variations in SOC performance caused by (minimal) changes in SOC configuration. We release the SAIBERSOC tool implementation as free and open source software.",
keywords = "SOC, Cyber security operations center, evaluation, performance",
author = "Martin Rosso and Michele Campobasso and Ganduulga Gankhuyag and Luca Allodi",
year = "2022",
month = jun,
doi = "10.1145/3491266",
language = "English",
volume = "3",
journal = "Digital Threats: Research and Practice",
issn = "2692-1626",
publisher = "Association for Computing Machinery (ACM)",
number = "2",
}