Compare commits
789 Commits
2.4.1-2023
...
2.4.3-2023
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
37116a9bdd | ||
|
|
6297a2632b | ||
|
|
5cc752f128 | ||
|
|
68d95cd1cb | ||
|
|
1a68c3cd24 | ||
|
|
87eec4ae88 | ||
|
|
676696b24a | ||
|
|
da27fce95f | ||
|
|
8acc37a7d1 | ||
|
|
5f1b467e64 | ||
|
|
fe7fb7f54d | ||
|
|
577bfac886 | ||
|
|
468b6e4831 | ||
|
|
c75d209d7f | ||
|
|
b29b264d5c | ||
|
|
c99e7da5a7 | ||
|
|
60d66b973c | ||
|
|
304830d2ee | ||
|
|
d7285d69a7 | ||
|
|
7cdd1f89d7 | ||
|
|
b7cab1d118 | ||
|
|
f03a472ee5 | ||
|
|
c7a0801eed | ||
|
|
5e0015e9ac | ||
|
|
5a72c558cb | ||
|
|
a6e907f76c | ||
|
|
2d3eb22057 | ||
|
|
8437fcd94c | ||
|
|
1b25db4573 | ||
|
|
f8ed2e6e8e | ||
|
|
f22c61a0a2 | ||
|
|
5069d1163c | ||
|
|
31edf2e8ea | ||
|
|
6b8893ded5 | ||
|
|
1f8b7bda89 | ||
|
|
59233d6550 | ||
|
|
1ac72e5b24 | ||
|
|
7805ca8beb | ||
|
|
47b2481cdd | ||
|
|
fa933d3f53 | ||
|
|
f4dc73a206 | ||
|
|
437c9cab68 | ||
|
|
6da96a733f | ||
|
|
82796370ce | ||
|
|
8c16feb772 | ||
|
|
ce1f363424 | ||
|
|
e8860a7d2c | ||
|
|
beb26596fd | ||
|
|
6a5ff04804 | ||
|
|
ff3bb11fbb | ||
|
|
8be5082b60 | ||
|
|
5faa4f0a30 | ||
|
|
da7770a900 | ||
|
|
8178338971 | ||
|
|
79ed17b506 | ||
|
|
2785587840 | ||
|
|
9f95306458 | ||
|
|
55bed0771b | ||
|
|
0b5ee49873 | ||
|
|
1646459052 | ||
|
|
8ec003d89f | ||
|
|
224f0606c2 | ||
|
|
910125f13a | ||
|
|
5eca1acbeb | ||
|
|
d551faeb16 | ||
|
|
6a6afeef75 | ||
|
|
869f60ccaa | ||
|
|
12c82d2812 | ||
|
|
a2b50c6d40 | ||
|
|
ab7ae6cddd | ||
|
|
7a9a12ae3d | ||
|
|
b49a296276 | ||
|
|
9b9321d23a | ||
|
|
1922ad95d5 | ||
|
|
9de8814412 | ||
|
|
35e7659904 | ||
|
|
d31ea4097d | ||
|
|
c277b7acfa | ||
|
|
97a9e0989d | ||
|
|
6bdccec6b1 | ||
|
|
35945ed224 | ||
|
|
7319d7ae9b | ||
|
|
8b38cbe8cf | ||
|
|
35ea084466 | ||
|
|
c89582ffb6 | ||
|
|
d6db94a4d4 | ||
|
|
e2acf027a9 | ||
|
|
d6d8ba7479 | ||
|
|
41a4321b03 | ||
|
|
2ae049071d | ||
|
|
e82df53997 | ||
|
|
273e78da94 | ||
|
|
446376395e | ||
|
|
a13001dce0 | ||
|
|
8819e1d4d6 | ||
|
|
1baea3bcd5 | ||
|
|
1c37c05824 | ||
|
|
cd1db36c13 | ||
|
|
5898c9ef31 | ||
|
|
951f04c265 | ||
|
|
4b069d91ab | ||
|
|
7561ec0512 | ||
|
|
eef1b40436 | ||
|
|
34db6fb823 | ||
|
|
eeaf077baf | ||
|
|
120d21c0da | ||
|
|
66457ad8f8 | ||
|
|
69670c481d | ||
|
|
cae011babb | ||
|
|
02ea939abc | ||
|
|
be028aa23e | ||
|
|
24b7f7a7ce | ||
|
|
12cce111db | ||
|
|
add72d7a5c | ||
|
|
c7a1d4758b | ||
|
|
8436b647dd | ||
|
|
77e6ee3c36 | ||
|
|
3e71663669 | ||
|
|
984971c63c | ||
|
|
6adef20a06 | ||
|
|
cb8faf7c5f | ||
|
|
740723ecd6 | ||
|
|
d70371c540 | ||
|
|
b6986d5c61 | ||
|
|
02e6e11be7 | ||
|
|
d26484fe1a | ||
|
|
12d10d7d42 | ||
|
|
7ea37ac2dd | ||
|
|
7aae72cfcf | ||
|
|
ec427cde08 | ||
|
|
c2efd7ef64 | ||
|
|
77c58e665e | ||
|
|
9530901d1d | ||
|
|
e83afa3e30 | ||
|
|
70fb28a8b3 | ||
|
|
8355432356 | ||
|
|
2247cafe5f | ||
|
|
85a8da6331 | ||
|
|
ddabab253c | ||
|
|
2e42eddbc2 | ||
|
|
07a590dda8 | ||
|
|
ec8eac3430 | ||
|
|
05b84327b8 | ||
|
|
0607532e4a | ||
|
|
3018886f72 | ||
|
|
e02bdffe34 | ||
|
|
5073d62ee8 | ||
|
|
e2ff48164b | ||
|
|
43832f9c34 | ||
|
|
5da5a04025 | ||
|
|
25b51135fc | ||
|
|
aa91c1fef2 | ||
|
|
801a5a6824 | ||
|
|
f63c26b7f2 | ||
|
|
336a40d646 | ||
|
|
bb0cfc5253 | ||
|
|
106aaa9c3e | ||
|
|
ff7db0be63 | ||
|
|
b96d3473f2 | ||
|
|
fb27e7c479 | ||
|
|
261acee8a0 | ||
|
|
a9585b2a7f | ||
|
|
62fa15c63e | ||
|
|
e995576b1d | ||
|
|
d247c9d704 | ||
|
|
b21b545756 | ||
|
|
5e8748c436 | ||
|
|
e2cca917c1 | ||
|
|
d8700137d2 | ||
|
|
2c42d4b19e | ||
|
|
a3c7e40c40 | ||
|
|
94fe456e28 | ||
|
|
662db41857 | ||
|
|
7623dd20b9 | ||
|
|
2b323ab661 | ||
|
|
8de01625a8 | ||
|
|
d0d7ab57ca | ||
|
|
f4cbe20ddf | ||
|
|
0d92a1594a | ||
|
|
daaead618e | ||
|
|
19469205e1 | ||
|
|
cae9e6230f | ||
|
|
6c4c815683 | ||
|
|
6769386c86 | ||
|
|
36272efda7 | ||
|
|
6b97d07a89 | ||
|
|
da82395dcf | ||
|
|
b5e5bd57ad | ||
|
|
ad4fb52b81 | ||
|
|
4e849ecc90 | ||
|
|
7e37cd0f05 | ||
|
|
3952c1a9b7 | ||
|
|
c13c37f406 | ||
|
|
9240c3c6f0 | ||
|
|
2aa01280e7 | ||
|
|
1675b787bf | ||
|
|
4866eb2315 | ||
|
|
f785fb2772 | ||
|
|
8c9f863808 | ||
|
|
1751e35121 | ||
|
|
6676afc7de | ||
|
|
699ea1ac3e | ||
|
|
90fdb9c465 | ||
|
|
48291f5271 | ||
|
|
3a41b090c1 | ||
|
|
139b36b189 | ||
|
|
6ddf887342 | ||
|
|
6ba9e057a9 | ||
|
|
6600484f8e | ||
|
|
b02c38175c | ||
|
|
4497f6561f | ||
|
|
0fc03baf58 | ||
|
|
fb81c6e2e3 | ||
|
|
ad28ea275f | ||
|
|
41951659ec | ||
|
|
451a4784a1 | ||
|
|
1b7095fa81 | ||
|
|
89d789fe0f | ||
|
|
49055e260f | ||
|
|
a465039887 | ||
|
|
b60cf29598 | ||
|
|
0e09d73aa0 | ||
|
|
520a5671ca | ||
|
|
fc824359ed | ||
|
|
7caa7cec6b | ||
|
|
0695140f83 | ||
|
|
ed1e2c8908 | ||
|
|
594900a8d4 | ||
|
|
6894fa4e4d | ||
|
|
2334d82d36 | ||
|
|
c0a2ea3138 | ||
|
|
d4acb1a33a | ||
|
|
5de9e5baf4 | ||
|
|
3a34da354f | ||
|
|
469390696e | ||
|
|
0a4a48b61e | ||
|
|
58a63e0765 | ||
|
|
251bc6f45e | ||
|
|
b84d997f87 | ||
|
|
b5bccc5e05 | ||
|
|
b4e5ac9796 | ||
|
|
2db95fe1b4 | ||
|
|
934b0f45a1 | ||
|
|
a88227d13f | ||
|
|
21a7b76352 | ||
|
|
03082339ca | ||
|
|
8f6226b531 | ||
|
|
2c4eccd7e0 | ||
|
|
fa57494694 | ||
|
|
3f1741e75a | ||
|
|
48331ce35b | ||
|
|
c2ac60b82e | ||
|
|
fedfbe9fec | ||
|
|
9947f9def4 | ||
|
|
c205438771 | ||
|
|
8cde05807c | ||
|
|
2ac0aba916 | ||
|
|
af003cc2a1 | ||
|
|
0d4f6b4fe6 | ||
|
|
7093254439 | ||
|
|
bd7644a557 | ||
|
|
90b740a997 | ||
|
|
5547a1b7ab | ||
|
|
1b90fd8581 | ||
|
|
bbdf7bb5a7 | ||
|
|
fb8ad71b27 | ||
|
|
e43b7607bb | ||
|
|
a265c06e31 | ||
|
|
2aa954cb0a | ||
|
|
73812b11a3 | ||
|
|
38ab426470 | ||
|
|
d0a6881c2c | ||
|
|
c7c4e65df1 | ||
|
|
49b150797d | ||
|
|
57268ba934 | ||
|
|
1208915896 | ||
|
|
42f5ad9939 | ||
|
|
8e0d895afb | ||
|
|
998c85e3f8 | ||
|
|
32f3ee0b01 | ||
|
|
a90aed25fb | ||
|
|
ae14e4870d | ||
|
|
273a1d7e9c | ||
|
|
b3f8ed7dcd | ||
|
|
ad5a424c03 | ||
|
|
e06787445c | ||
|
|
8a4f5d6dcb | ||
|
|
81dd951064 | ||
|
|
c12f138899 | ||
|
|
884a7041af | ||
|
|
023008c54c | ||
|
|
6f7de954d9 | ||
|
|
46371aaaf5 | ||
|
|
1fde2e2755 | ||
|
|
1aad9d1b2f | ||
|
|
9703e70163 | ||
|
|
f6735207d7 | ||
|
|
e5f76a9c6e | ||
|
|
d1c86cb9ff | ||
|
|
8ccb24dda2 | ||
|
|
932054e9da | ||
|
|
8b35002169 | ||
|
|
f68527d366 | ||
|
|
81e3d26540 | ||
|
|
96b60fa39a | ||
|
|
f172a74fbc | ||
|
|
c4be56ec7b | ||
|
|
96195806ab | ||
|
|
88bbd3440d | ||
|
|
495a9c0783 | ||
|
|
905bc564fc | ||
|
|
f6f387428f | ||
|
|
db5abcb3cf | ||
|
|
27e310c2a1 | ||
|
|
236eb0cbcc | ||
|
|
841d0b4b1f | ||
|
|
272f97e2d7 | ||
|
|
eac9a3fc86 | ||
|
|
32dc26f2e7 | ||
|
|
1b14142e4c | ||
|
|
2fef1d5fa7 | ||
|
|
3bbfc3865d | ||
|
|
6947fd6414 | ||
|
|
d3e5be78fd | ||
|
|
09e005127e | ||
|
|
d3ea596deb | ||
|
|
d6d315e8d5 | ||
|
|
58dc073678 | ||
|
|
8c9186d8dd | ||
|
|
aee842b912 | ||
|
|
3a5a59af59 | ||
|
|
8f3a874e61 | ||
|
|
66dc6274e6 | ||
|
|
302e580d8f | ||
|
|
4cf60a6054 | ||
|
|
8f6d82af97 | ||
|
|
8ab54dcead | ||
|
|
9704c8917e | ||
|
|
540ee156db | ||
|
|
344e2bf1d0 | ||
|
|
3441c0684e | ||
|
|
ed560f19d3 | ||
|
|
b3f6012856 | ||
|
|
9ae26ec866 | ||
|
|
20aaa79476 | ||
|
|
2bb77251b0 | ||
|
|
36791665f3 | ||
|
|
4d4744a89b | ||
|
|
f3be63051b | ||
|
|
743ed316f8 | ||
|
|
e4b4bbcfdc | ||
|
|
b6e090f29f | ||
|
|
25006ed20b | ||
|
|
4469a93a75 | ||
|
|
0027016b5a | ||
|
|
0143e2412d | ||
|
|
20212414c4 | ||
|
|
8a63ed5124 | ||
|
|
096dadf9bd | ||
|
|
b441fe662f | ||
|
|
e5117a343d | ||
|
|
b9d692eb0e | ||
|
|
36a7f54160 | ||
|
|
96134684dc | ||
|
|
374ab0779a | ||
|
|
d0d1cc9106 | ||
|
|
162a32fd08 | ||
|
|
9035fa3037 | ||
|
|
b4b87e5620 | ||
|
|
97c53d70a4 | ||
|
|
53b4f7bd5c | ||
|
|
192c8c78c7 | ||
|
|
62a063dae4 | ||
|
|
79014a53ec | ||
|
|
e910f04beb | ||
|
|
ef5b63337b | ||
|
|
799e92e595 | ||
|
|
c835c523a9 | ||
|
|
9ec1492fad | ||
|
|
5af1bfe142 | ||
|
|
482c5324db | ||
|
|
3c1f1cd50e | ||
|
|
aecd900203 | ||
|
|
89f5d9f292 | ||
|
|
de43a202a3 | ||
|
|
6176fa7ca5 | ||
|
|
9ff27e5b6a | ||
|
|
5922fc0e45 | ||
|
|
b48e259fee | ||
|
|
b4d85a7bf8 | ||
|
|
38881231ac | ||
|
|
b2d2a9f0ed | ||
|
|
32021cf272 | ||
|
|
4410e136b1 | ||
|
|
81d4584819 | ||
|
|
f765dc23ea | ||
|
|
657ef97d17 | ||
|
|
8f247f962a | ||
|
|
bcbdab1682 | ||
|
|
5b4ec70ca6 | ||
|
|
ce114a2601 | ||
|
|
5de59a879a | ||
|
|
a2e6469a38 | ||
|
|
5c933910aa | ||
|
|
a3c3f08511 | ||
|
|
9aa58be286 | ||
|
|
db56b3d6a3 | ||
|
|
7d6182a18f | ||
|
|
074f84ae4d | ||
|
|
8ce0d76287 | ||
|
|
3be3df00d1 | ||
|
|
d99d4756c3 | ||
|
|
0d83b13585 | ||
|
|
6505d3e2ce | ||
|
|
6edfadd18b | ||
|
|
9552510c7d | ||
|
|
36ddcfa4e5 | ||
|
|
fcc1337e1a | ||
|
|
10f9d0f4bd | ||
|
|
edf531739c | ||
|
|
11d7e66ea0 | ||
|
|
caaedee5a7 | ||
|
|
1bdd79c578 | ||
|
|
c199acc64e | ||
|
|
a01704a1d7 | ||
|
|
53f258b08f | ||
|
|
a308a39bbe | ||
|
|
5c00655ad0 | ||
|
|
67a608ea56 | ||
|
|
01d983fc00 | ||
|
|
d6f1bcfdf0 | ||
|
|
f156573f8d | ||
|
|
b3e0e68896 | ||
|
|
86803f1fb5 | ||
|
|
aad08a830b | ||
|
|
c9db6c0f18 | ||
|
|
d9a9c8738c | ||
|
|
cb0ed9ae6d | ||
|
|
4f72fca2d7 | ||
|
|
1dc426b8ce | ||
|
|
8995012c80 | ||
|
|
2c4ba2e8b2 | ||
|
|
c42959d040 | ||
|
|
fa6dcd7f83 | ||
|
|
9c6365aa2f | ||
|
|
6e4c4febfb | ||
|
|
732d2aadf8 | ||
|
|
cace817c79 | ||
|
|
e1c361e555 | ||
|
|
502277b1b7 | ||
|
|
57f5a22f0f | ||
|
|
4b18a0e758 | ||
|
|
f6a9a764de | ||
|
|
e65214b097 | ||
|
|
cc47f9a595 | ||
|
|
eb633be437 | ||
|
|
df0dc2e4d1 | ||
|
|
766f4dd661 | ||
|
|
f53fb69ffb | ||
|
|
ba0ec18a33 | ||
|
|
79182cecfd | ||
|
|
8cf82c4b6a | ||
|
|
78d4586033 | ||
|
|
02cf1074f2 | ||
|
|
a881cab469 | ||
|
|
00bd93c026 | ||
|
|
2c10ad7eec | ||
|
|
167051af28 | ||
|
|
eb9c5e9af0 | ||
|
|
2f942a3e37 | ||
|
|
03f97b309a | ||
|
|
c6a962a46b | ||
|
|
1ddf45bbbe | ||
|
|
f0c4cebaca | ||
|
|
87c42ece00 | ||
|
|
4f8fcd3369 | ||
|
|
5b2d91b5b5 | ||
|
|
a84322f9b7 | ||
|
|
2de95bcb63 | ||
|
|
1e9e2facde | ||
|
|
592c67d1f2 | ||
|
|
e91dd29cb2 | ||
|
|
13c9142814 | ||
|
|
ef4f2491f3 | ||
|
|
645555b990 | ||
|
|
839275814c | ||
|
|
9b973e07e2 | ||
|
|
0027385da9 | ||
|
|
4ef77f9050 | ||
|
|
debbdec350 | ||
|
|
bf4ac0c2dd | ||
|
|
cb9e7e63db | ||
|
|
32560af767 | ||
|
|
1e5ac61ff5 | ||
|
|
5315c51197 | ||
|
|
8917f9b9d2 | ||
|
|
c0dc05f26a | ||
|
|
2aa801d906 | ||
|
|
c192ec9109 | ||
|
|
7ab31e36af | ||
|
|
0fd9fb9294 | ||
|
|
059f80bfc4 | ||
|
|
bab2f7282c | ||
|
|
02920b5ac9 | ||
|
|
25b0934cda | ||
|
|
d3c7ea4805 | ||
|
|
82c3d78672 | ||
|
|
97b68609bc | ||
|
|
1d611e618f | ||
|
|
f4b8d385ee | ||
|
|
b7e0923ec4 | ||
|
|
4930ae4ba6 | ||
|
|
d11479ec5f | ||
|
|
901e3c4a20 | ||
|
|
81842462ba | ||
|
|
e15c14cc2e | ||
|
|
f7ddf57f39 | ||
|
|
47e67fda46 | ||
|
|
7d0251952c | ||
|
|
5536f5a8c2 | ||
|
|
2c932fae9d | ||
|
|
24445cf36a | ||
|
|
0feb25c962 | ||
|
|
3abb4d79ba | ||
|
|
1df183deb3 | ||
|
|
77834c1e58 | ||
|
|
d6207705cd | ||
|
|
e4b61aa08d | ||
|
|
736ff2930d | ||
|
|
6aff526d9e | ||
|
|
8101171c97 | ||
|
|
000507c366 | ||
|
|
82fdee45aa | ||
|
|
2419fa43b6 | ||
|
|
acc7619023 | ||
|
|
dcd761ad74 | ||
|
|
9871ecd223 | ||
|
|
56a7fdcfcd | ||
|
|
6325f6db16 | ||
|
|
b253cd45ca | ||
|
|
1724565331 | ||
|
|
00a7beaca2 | ||
|
|
c129bba7e5 | ||
|
|
fb298224fc | ||
|
|
1feed47185 | ||
|
|
923de356e1 | ||
|
|
cea9af4e01 | ||
|
|
0f6d894322 | ||
|
|
9f879164ec | ||
|
|
1ddc4b6ff8 | ||
|
|
58f80120bd | ||
|
|
a0e08e4f41 | ||
|
|
2813d67670 | ||
|
|
c49b134122 | ||
|
|
48ce377b02 | ||
|
|
40de01e8c4 | ||
|
|
2fe88a1e66 | ||
|
|
214117e0e0 | ||
|
|
bc2d3e43f0 | ||
|
|
b3528b2139 | ||
|
|
7ecd067e2b | ||
|
|
576c1d7cc1 | ||
|
|
6320528263 | ||
|
|
6528632861 | ||
|
|
928b3b5471 | ||
|
|
f1c8467e9b | ||
|
|
f5337eba1a | ||
|
|
de28e15805 | ||
|
|
09ba15f9bb | ||
|
|
ba9892941d | ||
|
|
b381c51246 | ||
|
|
64726af69c | ||
|
|
7a4fea7a12 | ||
|
|
db47256cdd | ||
|
|
ba2392997b | ||
|
|
1a1bcb3526 | ||
|
|
997e6c141a | ||
|
|
9a3c997779 | ||
|
|
53ed4d49c2 | ||
|
|
0cee5b54a1 | ||
|
|
3f8e15d16f | ||
|
|
f8f6a1433a | ||
|
|
83188401c5 | ||
|
|
b01367a294 | ||
|
|
d8e0e320f4 | ||
|
|
b033f0d20f | ||
|
|
b71b4225c4 | ||
|
|
2a39f5f0b5 | ||
|
|
e27e690bc8 | ||
|
|
57371ffe5a | ||
|
|
4440ecd433 | ||
|
|
277ad61920 | ||
|
|
0860b1501e | ||
|
|
b06610088a | ||
|
|
aa2f168b73 | ||
|
|
d1f7e5f4a7 | ||
|
|
05a81596e5 | ||
|
|
00d1ca0b62 | ||
|
|
dbd4a5bd98 | ||
|
|
3db34a3346 | ||
|
|
f9890778ad | ||
|
|
e342dae818 | ||
|
|
64e294ef48 | ||
|
|
992bbdfac1 | ||
|
|
a4cd695cc8 | ||
|
|
9f85b3cb4f | ||
|
|
e9fd7d8b8b | ||
|
|
fa1a428133 | ||
|
|
8e18986671 | ||
|
|
a3b97b40ba | ||
|
|
634dd9907d | ||
|
|
1d12dcd243 | ||
|
|
2ec8d6abf0 | ||
|
|
98c19e5934 | ||
|
|
03e7636a18 | ||
|
|
6ce9561ba7 | ||
|
|
b80dd996cc | ||
|
|
63cea88c1d | ||
|
|
f41c75c633 | ||
|
|
20f706f165 | ||
|
|
c74b440922 | ||
|
|
badaab94de | ||
|
|
2be6c603ab | ||
|
|
7700a5a1bf | ||
|
|
687a89e30b | ||
|
|
06a0492226 | ||
|
|
4e4034e054 | ||
|
|
5b06aa518e | ||
|
|
c91fb438bb | ||
|
|
54c9a3ec71 | ||
|
|
cc1babbea6 | ||
|
|
bde67266d4 | ||
|
|
1de1e2fdc2 | ||
|
|
2293574f2e | ||
|
|
3077c21bd9 | ||
|
|
a52ca6e298 | ||
|
|
02e1a29f0c | ||
|
|
1b9ed1c72b | ||
|
|
9564158c32 | ||
|
|
ce1f75aab6 | ||
|
|
a0ce46e702 | ||
|
|
f501fac9cd | ||
|
|
8b95edd91a | ||
|
|
c5e5763014 | ||
|
|
2322ed4b6d | ||
|
|
38d69701a4 | ||
|
|
4dc0f06331 | ||
|
|
ec7bcd9b0c | ||
|
|
24140c4cda | ||
|
|
6909d3ed14 | ||
|
|
cf5feafb1e | ||
|
|
ebc20a86eb | ||
|
|
e792fbe023 | ||
|
|
02b619193d | ||
|
|
e5aab3b707 | ||
|
|
089fcbd0c5 | ||
|
|
62bafb94f9 | ||
|
|
9d6fb98e3b | ||
|
|
7bd9a84aa1 | ||
|
|
328b714306 | ||
|
|
2a979197a0 | ||
|
|
6f7f09f1cd | ||
|
|
f9804c218d | ||
|
|
dfc4498921 | ||
|
|
9049f9cf03 | ||
|
|
79a5f3a89f | ||
|
|
c7cb11e919 | ||
|
|
da81d93930 | ||
|
|
44344612b7 | ||
|
|
7ac4bc52a3 | ||
|
|
9aaa33c224 | ||
|
|
a13e6257c3 | ||
|
|
ef18cb3704 | ||
|
|
d5c7eec4ef | ||
|
|
a2c444e03b | ||
|
|
40c3f9a156 | ||
|
|
bd23d1ab7b | ||
|
|
a1e0041b14 | ||
|
|
7483dbf442 | ||
|
|
0f30e787b3 | ||
|
|
5d50dbb69e | ||
|
|
867ea5a1ac | ||
|
|
52cfc59113 | ||
|
|
789eafa8c2 | ||
|
|
ed712477d6 | ||
|
|
e3cb0a9953 | ||
|
|
743bbfea35 | ||
|
|
e8a5a5bffb | ||
|
|
a97fa9675b | ||
|
|
2418d9a096 | ||
|
|
2a8ed24045 | ||
|
|
f1c91e91b1 | ||
|
|
5405bc4e20 | ||
|
|
47a580d110 | ||
|
|
61a43f7df5 | ||
|
|
21ffcbf2fd | ||
|
|
563c0631ba | ||
|
|
77cbf35625 | ||
|
|
d7972032e4 | ||
|
|
f6dcefe0f8 | ||
|
|
d5a1406095 | ||
|
|
3d3be6bd29 | ||
|
|
52fec5fef0 | ||
|
|
ddb776c80e | ||
|
|
469258ee5e | ||
|
|
4fec2a18a5 | ||
|
|
c7ed29dfa8 | ||
|
|
80cbe5f6e8 | ||
|
|
a64eb0ba97 | ||
|
|
dbb1b82e1b | ||
|
|
f34627f709 | ||
|
|
59451fc4d0 | ||
|
|
dc77b20723 | ||
|
|
51869ce5b2 | ||
|
|
98705608a6 | ||
|
|
8055088d25 | ||
|
|
d0cfaaeb26 | ||
|
|
fbacfce0e4 | ||
|
|
082704ce1f | ||
|
|
71b6311edc | ||
|
|
7e71c60334 | ||
|
|
c5c2600799 | ||
|
|
c6c3cc82e4 | ||
|
|
b17b68034e | ||
|
|
cbd1c05929 | ||
|
|
b14d33ced8 | ||
|
|
a5b1660778 | ||
|
|
d5c4a2887e | ||
|
|
b4b84038ed | ||
|
|
85ce0bb472 | ||
|
|
b0bd64bc10 | ||
|
|
17dd21703d | ||
|
|
767c922083 | ||
|
|
a57ba7e35d | ||
|
|
81c1678ec7 | ||
|
|
1593da4597 | ||
|
|
8359f1983c | ||
|
|
87a20ffede | ||
|
|
c597766390 | ||
|
|
3d10a60502 | ||
|
|
220c534ad4 | ||
|
|
c7604e893e | ||
|
|
b56486d88e | ||
|
|
c99f19251b | ||
|
|
544fa824ea | ||
|
|
dd034edad6 | ||
|
|
2419cf86ee | ||
|
|
61f9573ace | ||
|
|
7595072e85 | ||
|
|
e60e21d9ff | ||
|
|
b46a5c4b2a | ||
|
|
40ff2677c4 | ||
|
|
80b40503fb | ||
|
|
6a501efa75 | ||
|
|
1f6463a9bb | ||
|
|
2d4f4791e0 | ||
|
|
102906f5dd | ||
|
|
6c151d3ebd | ||
|
|
17e6f5b899 | ||
|
|
a38495ce39 | ||
|
|
38629a7676 | ||
|
|
9a4ae2b832 | ||
|
|
3fdcb92dfe | ||
|
|
725f5414ba | ||
|
|
73aceb9697 | ||
|
|
03c89a02ad | ||
|
|
666d4ea260 | ||
|
|
4c58aa2ccf | ||
|
|
26619e5f8d | ||
|
|
57d90a62f7 | ||
|
|
a8b8a1d0b7 | ||
|
|
e4375a6568 | ||
|
|
b8f9a9a311 | ||
|
|
3d7f2bc691 | ||
|
|
e799edaf49 | ||
|
|
be003f7ee4 | ||
|
|
868cb8183c | ||
|
|
b3f94961ea | ||
|
|
12120e94c8 | ||
|
|
49a60bac76 | ||
|
|
f07f0775ac | ||
|
|
e93e58fedb | ||
|
|
8459054ff8 | ||
|
|
43ec897397 | ||
|
|
4b73f859d1 | ||
|
|
969cf25818 | ||
|
|
e25bbd8a0d | ||
|
|
5b11c41434 | ||
|
|
99f21ce46f |
52
DOWNLOAD_AND_VERIFY_ISO.md
Normal file
@@ -0,0 +1,52 @@
|
||||
### 2.4.3-20230711 ISO image built on 2023/07/11
|
||||
|
||||
|
||||
|
||||
### Download and Verify
|
||||
|
||||
2.4.3-20230711 ISO image:
|
||||
https://download.securityonion.net/file/securityonion/securityonion-2.4.3-20230711.iso
|
||||
|
||||
MD5: F481ED39E02A5AF05EB50D319D97A6C7
|
||||
SHA1: 20F9BAA8F73A44C21A8DFE81F36247BCF33CEDA6
|
||||
SHA256: D805522E02CD4941641385F6FF86FAAC240DA6C5FD98F78460348632C7C631B0
|
||||
|
||||
Signature for ISO image:
|
||||
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.3-20230711.iso.sig
|
||||
|
||||
Signing key:
|
||||
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
|
||||
|
||||
For example, here are the steps you can use on most Linux distributions to download and verify our Security Onion ISO image.
|
||||
|
||||
Download and import the signing key:
|
||||
```
|
||||
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS -O - | gpg --import -
|
||||
```
|
||||
|
||||
Download the signature file for the ISO:
|
||||
```
|
||||
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.3-20230711.iso.sig
|
||||
```
|
||||
|
||||
Download the ISO image:
|
||||
```
|
||||
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.3-20230711.iso
|
||||
```
|
||||
|
||||
Verify the downloaded ISO image using the signature file:
|
||||
```
|
||||
gpg --verify securityonion-2.4.3-20230711.iso.sig securityonion-2.4.3-20230711.iso
|
||||
```
|
||||
|
||||
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
|
||||
```
|
||||
gpg: Signature made Tue 11 Jul 2023 06:23:37 PM EDT using RSA key ID FE507013
|
||||
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
|
||||
gpg: WARNING: This key is not certified with a trusted signature!
|
||||
gpg: There is no indication that the signature belongs to the owner.
|
||||
Primary key fingerprint: C804 A93D 36BE 0C73 3EA1 9644 7C10 60B7 FE50 7013
|
||||
```
|
||||
|
||||
Once you've verified the ISO image, you're ready to proceed to our Installation guide:
|
||||
https://docs.securityonion.net/en/2.4/installation.html
|
||||
20
README.md
@@ -1,20 +1,26 @@
|
||||
## Security Onion 2.4 Beta 2
|
||||
## Security Onion 2.4 Beta 4
|
||||
|
||||
Security Onion 2.4 Beta 2 is here!
|
||||
Security Onion 2.4 Beta 4 is here!
|
||||
|
||||
## Screenshots
|
||||
|
||||
Alerts
|
||||

|
||||

|
||||
|
||||
Dashboards
|
||||

|
||||

|
||||
|
||||
Hunt
|
||||

|
||||

|
||||
|
||||
Cases
|
||||

|
||||
PCAP
|
||||

|
||||
|
||||
Grid
|
||||

|
||||
|
||||
Config
|
||||

|
||||
|
||||
### Release Notes
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
### An ISO will be available starting in RC1.
|
||||
@@ -1,13 +0,0 @@
|
||||
logrotate:
|
||||
conf: |
|
||||
daily
|
||||
rotate 14
|
||||
missingok
|
||||
copytruncate
|
||||
compress
|
||||
create
|
||||
extension .log
|
||||
dateext
|
||||
dateyesterday
|
||||
group_conf: |
|
||||
su root socore
|
||||
@@ -1,6 +0,0 @@
|
||||
logstash:
|
||||
pipelines:
|
||||
fleet:
|
||||
config:
|
||||
- so/0012_input_elastic_agent.conf
|
||||
- so/9806_output_lumberjack_fleet.conf.jinja
|
||||
@@ -1,42 +0,0 @@
|
||||
logstash:
|
||||
pipelines:
|
||||
helix:
|
||||
config:
|
||||
- so/0010_input_hhbeats.conf
|
||||
- so/1033_preprocess_snort.conf
|
||||
- so/1100_preprocess_bro_conn.conf
|
||||
- so/1101_preprocess_bro_dhcp.conf
|
||||
- so/1102_preprocess_bro_dns.conf
|
||||
- so/1103_preprocess_bro_dpd.conf
|
||||
- so/1104_preprocess_bro_files.conf
|
||||
- so/1105_preprocess_bro_ftp.conf
|
||||
- so/1106_preprocess_bro_http.conf
|
||||
- so/1107_preprocess_bro_irc.conf
|
||||
- so/1108_preprocess_bro_kerberos.conf
|
||||
- so/1109_preprocess_bro_notice.conf
|
||||
- so/1110_preprocess_bro_rdp.conf
|
||||
- so/1111_preprocess_bro_signatures.conf
|
||||
- so/1112_preprocess_bro_smtp.conf
|
||||
- so/1113_preprocess_bro_snmp.conf
|
||||
- so/1114_preprocess_bro_software.conf
|
||||
- so/1115_preprocess_bro_ssh.conf
|
||||
- so/1116_preprocess_bro_ssl.conf
|
||||
- so/1117_preprocess_bro_syslog.conf
|
||||
- so/1118_preprocess_bro_tunnel.conf
|
||||
- so/1119_preprocess_bro_weird.conf
|
||||
- so/1121_preprocess_bro_mysql.conf
|
||||
- so/1122_preprocess_bro_socks.conf
|
||||
- so/1123_preprocess_bro_x509.conf
|
||||
- so/1124_preprocess_bro_intel.conf
|
||||
- so/1125_preprocess_bro_modbus.conf
|
||||
- so/1126_preprocess_bro_sip.conf
|
||||
- so/1127_preprocess_bro_radius.conf
|
||||
- so/1128_preprocess_bro_pe.conf
|
||||
- so/1129_preprocess_bro_rfb.conf
|
||||
- so/1130_preprocess_bro_dnp3.conf
|
||||
- so/1131_preprocess_bro_smb_files.conf
|
||||
- so/1132_preprocess_bro_smb_mapping.conf
|
||||
- so/1133_preprocess_bro_ntlm.conf
|
||||
- so/1134_preprocess_bro_dce_rpc.conf
|
||||
- so/8001_postprocess_common_ip_augmentation.conf
|
||||
- so/9997_output_helix.conf.jinja
|
||||
@@ -1,8 +0,0 @@
|
||||
logstash:
|
||||
pipelines:
|
||||
manager:
|
||||
config:
|
||||
- so/0011_input_endgame.conf
|
||||
- so/0012_input_elastic_agent.conf
|
||||
- so/0013_input_lumberjack_fleet.conf
|
||||
- so/9999_output_redis.conf.jinja
|
||||
@@ -2,7 +2,7 @@
|
||||
{% set cached_grains = salt.saltutil.runner('cache.grains', tgt='*') %}
|
||||
{% for minionid, ip in salt.saltutil.runner(
|
||||
'mine.get',
|
||||
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-searchnode or G@role:so-heavynode or G@role:so-receiver or G@role:so-helix ',
|
||||
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-searchnode or G@role:so-heavynode or G@role:so-receiver or G@role:so-fleet ',
|
||||
fun='network.ip_addrs',
|
||||
tgt_type='compound') | dictsort()
|
||||
%}
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
logstash:
|
||||
pipelines:
|
||||
receiver:
|
||||
config:
|
||||
- so/0011_input_endgame.conf
|
||||
- so/0012_input_elastic_agent.conf
|
||||
- so/9999_output_redis.conf.jinja
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
logstash:
|
||||
pipelines:
|
||||
search:
|
||||
config:
|
||||
- so/0900_input_redis.conf.jinja
|
||||
- so/9805_output_elastic_agent.conf.jinja
|
||||
- so/9900_output_endgame.conf.jinja
|
||||
14
pillar/soc/license.sls
Normal file
@@ -0,0 +1,14 @@
|
||||
# Copyright Jason Ertel (github.com/jertel).
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with
|
||||
# the Elastic License 2.0.
|
||||
|
||||
# Note: Per the Elastic License 2.0, the second limitation states:
|
||||
#
|
||||
# "You may not move, change, disable, or circumvent the license key functionality
|
||||
# in the software, and you may not remove or obscure any functionality in the
|
||||
# software that is protected by the license key."
|
||||
|
||||
# This file is generated by Security Onion and contains a list of license-enabled features.
|
||||
features: []
|
||||
205
pillar/top.sls
@@ -1,44 +1,26 @@
|
||||
base:
|
||||
'*':
|
||||
- patch.needs_restarting
|
||||
- ntp.soc_ntp
|
||||
- ntp.adv_ntp
|
||||
- logrotate
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- docker.soc_docker
|
||||
- docker.adv_docker
|
||||
- firewall.soc_firewall
|
||||
- firewall.adv_firewall
|
||||
- influxdb.token
|
||||
- logrotate.soc_logrotate
|
||||
- logrotate.adv_logrotate
|
||||
- nginx.soc_nginx
|
||||
- nginx.adv_nginx
|
||||
- node_data.ips
|
||||
- ntp.soc_ntp
|
||||
- ntp.adv_ntp
|
||||
- patch.needs_restarting
|
||||
- patch.soc_patch
|
||||
- patch.adv_patch
|
||||
- sensoroni.soc_sensoroni
|
||||
- sensoroni.adv_sensoroni
|
||||
- telegraf.soc_telegraf
|
||||
- telegraf.adv_telegraf
|
||||
- influxdb.token
|
||||
- node_data.ips
|
||||
|
||||
'* and not *_eval and not *_import':
|
||||
- logstash.nodes
|
||||
|
||||
'*_eval or *_heavynode or *_sensor or *_standalone or *_import':
|
||||
- match: compound
|
||||
- zeek
|
||||
- bpf.soc_bpf
|
||||
- bpf.adv_bpf
|
||||
|
||||
'*_managersearch or *_heavynode':
|
||||
- match: compound
|
||||
- logstash
|
||||
- logstash.manager
|
||||
- logstash.search
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- elasticsearch.index_templates
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
|
||||
'*_manager':
|
||||
- logstash
|
||||
- logstash.manager
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- elasticsearch.index_templates
|
||||
|
||||
'*_manager or *_managersearch':
|
||||
- match: compound
|
||||
@@ -49,14 +31,20 @@ base:
|
||||
- kibana.secrets
|
||||
{% endif %}
|
||||
- secrets
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- manager.soc_manager
|
||||
- manager.adv_manager
|
||||
- idstools.soc_idstools
|
||||
- idstools.adv_idstools
|
||||
- logstash.nodes
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- soc.soc_soc
|
||||
- soc.adv_soc
|
||||
- soc.license
|
||||
- soctopus.soc_soctopus
|
||||
- soctopus.adv_soctopus
|
||||
- kibana.soc_kibana
|
||||
- kibana.adv_kibana
|
||||
- kratos.soc_kratos
|
||||
- kratos.adv_kratos
|
||||
- redis.soc_redis
|
||||
@@ -65,17 +53,31 @@ base:
|
||||
- influxdb.adv_influxdb
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
- elasticfleet.soc_elasticfleet
|
||||
- elasticfleet.adv_elasticfleet
|
||||
- elastalert.soc_elastalert
|
||||
- elastalert.adv_elastalert
|
||||
- backup.soc_backup
|
||||
- backup.adv_backup
|
||||
- firewall.soc_firewall
|
||||
- firewall.adv_firewall
|
||||
- curator.soc_curator
|
||||
- curator.adv_curator
|
||||
- soctopus.soc_soctopus
|
||||
- soctopus.adv_soctopus
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_sensor':
|
||||
- healthcheck.sensor
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- strelka.soc_strelka
|
||||
- strelka.adv_strelka
|
||||
- zeek.soc_zeek
|
||||
- zeek.adv_zeek
|
||||
- bpf.soc_bpf
|
||||
- bpf.adv_bpf
|
||||
- pcap.soc_pcap
|
||||
- pcap.adv_pcap
|
||||
- suricata.soc_suricata
|
||||
- suricata.adv_suricata
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
@@ -89,16 +91,28 @@ base:
|
||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
||||
- kibana.secrets
|
||||
{% endif %}
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- kratos.soc_kratos
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
- elasticfleet.soc_elasticfleet
|
||||
- elasticfleet.adv_elasticfleet
|
||||
- elastalert.soc_elastalert
|
||||
- elastalert.adv_elastalert
|
||||
- manager.soc_manager
|
||||
- manager.adv_manager
|
||||
- idstools.soc_idstools
|
||||
- idstools.adv_idstools
|
||||
- soc.soc_soc
|
||||
- soc.adv_soc
|
||||
- soc.license
|
||||
- soctopus.soc_soctopus
|
||||
- soctopus.adv_soctopus
|
||||
- kibana.soc_kibana
|
||||
- kibana.adv_kibana
|
||||
- strelka.soc_strelka
|
||||
- strelka.adv_strelka
|
||||
- curator.soc_curator
|
||||
- curator.adv_curator
|
||||
- kratos.soc_kratos
|
||||
- kratos.adv_kratos
|
||||
- redis.soc_redis
|
||||
@@ -107,15 +121,19 @@ base:
|
||||
- influxdb.adv_influxdb
|
||||
- backup.soc_backup
|
||||
- backup.adv_backup
|
||||
- firewall.soc_firewall
|
||||
- firewall.adv_firewall
|
||||
- zeek.soc_zeek
|
||||
- zeek.adv_zeek
|
||||
- bpf.soc_bpf
|
||||
- bpf.adv_bpf
|
||||
- pcap.soc_pcap
|
||||
- pcap.adv_pcap
|
||||
- suricata.soc_suricata
|
||||
- suricata.adv_suricata
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_standalone':
|
||||
- logstash
|
||||
- logstash.manager
|
||||
- logstash.search
|
||||
- logstash.nodes
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- elasticsearch.index_templates
|
||||
@@ -127,8 +145,6 @@ base:
|
||||
{% endif %}
|
||||
- secrets
|
||||
- healthcheck.standalone
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- idstools.soc_idstools
|
||||
- idstools.adv_idstools
|
||||
- kratos.soc_kratos
|
||||
@@ -139,52 +155,82 @@ base:
|
||||
- influxdb.adv_influxdb
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
- elasticfleet.soc_elasticfleet
|
||||
- elasticfleet.adv_elasticfleet
|
||||
- elastalert.soc_elastalert
|
||||
- elastalert.adv_elastalert
|
||||
- manager.soc_manager
|
||||
- manager.adv_manager
|
||||
- soc.soc_soc
|
||||
- soc.adv_soc
|
||||
- soc.license
|
||||
- soctopus.soc_soctopus
|
||||
- soctopus.adv_soctopus
|
||||
- kibana.soc_kibana
|
||||
- kibana.adv_kibana
|
||||
- strelka.soc_strelka
|
||||
- strelka.adv_strelka
|
||||
- curator.soc_curator
|
||||
- curator.adv_curator
|
||||
- backup.soc_backup
|
||||
- backup.adv_backup
|
||||
- firewall.soc_firewall
|
||||
- firewall.adv_firewall
|
||||
- zeek.soc_zeek
|
||||
- zeek.adv_zeek
|
||||
- bpf.soc_bpf
|
||||
- bpf.adv_bpf
|
||||
- pcap.soc_pcap
|
||||
- pcap.adv_pcap
|
||||
- suricata.soc_suricata
|
||||
- suricata.adv_suricata
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_heavynode':
|
||||
- elasticsearch.auth
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- logstash.nodes
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
- curator.soc_curator
|
||||
- curator.adv_curator
|
||||
- redis.soc_redis
|
||||
- redis.adv_redis
|
||||
- zeek.soc_zeek
|
||||
- zeek.adv_zeek
|
||||
- bpf.soc_bpf
|
||||
- bpf.adv_bpf
|
||||
- pcap.soc_pcap
|
||||
- pcap.adv_pcap
|
||||
- suricata.soc_suricata
|
||||
- suricata.adv_suricata
|
||||
- strelka.soc_strelka
|
||||
- strelka.adv_strelka
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_idh':
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- idh.soc_idh
|
||||
- idh.adv_idh
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_searchnode':
|
||||
- logstash
|
||||
- logstash.search
|
||||
- logstash.nodes
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- elasticsearch.index_templates
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||
- elasticsearch.auth
|
||||
{% endif %}
|
||||
- redis.soc_redis
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- redis.adv_redis
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_receiver':
|
||||
- logstash
|
||||
- logstash.receiver
|
||||
- logstash.nodes
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||
@@ -192,8 +238,6 @@ base:
|
||||
{% endif %}
|
||||
- redis.soc_redis
|
||||
- redis.adv_redis
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
@@ -209,11 +253,21 @@ base:
|
||||
- kratos.soc_kratos
|
||||
- elasticsearch.soc_elasticsearch
|
||||
- elasticsearch.adv_elasticsearch
|
||||
- elasticfleet.soc_elasticfleet
|
||||
- elasticfleet.adv_elasticfleet
|
||||
- elastalert.soc_elastalert
|
||||
- elastalert.adv_elastalert
|
||||
- manager.soc_manager
|
||||
- manager.adv_manager
|
||||
- soc.soc_soc
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- soc.adv_soc
|
||||
- soc.license
|
||||
- soctopus.soc_soctopus
|
||||
- soctopus.adv_soctopus
|
||||
- kibana.soc_kibana
|
||||
- kibana.adv_kibana
|
||||
- curator.soc_curator
|
||||
- curator.adv_curator
|
||||
- backup.soc_backup
|
||||
- backup.adv_backup
|
||||
- kratos.soc_kratos
|
||||
@@ -222,23 +276,30 @@ base:
|
||||
- redis.adv_redis
|
||||
- influxdb.soc_influxdb
|
||||
- influxdb.adv_influxdb
|
||||
- firewall.soc_firewall
|
||||
- firewall.adv_firewall
|
||||
- zeek.soc_zeek
|
||||
- zeek.adv_zeek
|
||||
- bpf.soc_bpf
|
||||
- bpf.adv_bpf
|
||||
- pcap.soc_pcap
|
||||
- pcap.adv_pcap
|
||||
- suricata.soc_suricata
|
||||
- suricata.adv_suricata
|
||||
- strelka.soc_strelka
|
||||
- strelka.adv_strelka
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_fleet':
|
||||
- global.soc_global
|
||||
- global.adv_global
|
||||
- backup.soc_backup
|
||||
- backup.adv_backup
|
||||
- logstash
|
||||
- logstash.fleet
|
||||
- logstash.nodes
|
||||
- logstash.soc_logstash
|
||||
- logstash.adv_logstash
|
||||
- elasticfleet.soc_elasticfleet
|
||||
- elasticfleet.adv_elasticfleet
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
'*_workstation':
|
||||
'*_desktop':
|
||||
- minions.{{ grains.id }}
|
||||
- minions.adv_{{ grains.id }}
|
||||
|
||||
@@ -3,16 +3,6 @@
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
|
||||
{% set ZEEKVER = salt['pillar.get']('global:mdengine', '') %}
|
||||
{% set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
|
||||
{% set ELASTALERT = salt['pillar.get']('elastalert:enabled', True) %}
|
||||
{% set ELASTICSEARCH = salt['pillar.get']('elasticsearch:enabled', True) %}
|
||||
{% set KIBANA = salt['pillar.get']('kibana:enabled', True) %}
|
||||
{% set LOGSTASH = salt['pillar.get']('logstash:enabled', True) %}
|
||||
{% set CURATOR = salt['pillar.get']('curator:enabled', True) %}
|
||||
{% set REDIS = salt['pillar.get']('redis:enabled', True) %}
|
||||
{% set STRELKA = salt['pillar.get']('strelka:enabled', '0') %}
|
||||
{% set ISAIRGAP = salt['pillar.get']('global:airgap', False) %}
|
||||
{% import_yaml 'salt/minion.defaults.yaml' as saltversion %}
|
||||
{% set saltversion = saltversion.salt.minion.version %}
|
||||
@@ -35,6 +25,7 @@
|
||||
'soc',
|
||||
'kratos',
|
||||
'elasticfleet',
|
||||
'elastic-fleet-package-registry',
|
||||
'firewall',
|
||||
'idstools',
|
||||
'suricata.manager',
|
||||
@@ -55,23 +46,7 @@
|
||||
'pcap',
|
||||
'suricata',
|
||||
'healthcheck',
|
||||
'schedule',
|
||||
'tcpreplay',
|
||||
'docker_clean'
|
||||
],
|
||||
'so-helixsensor': [
|
||||
'salt.master',
|
||||
'ca',
|
||||
'ssl',
|
||||
'registry',
|
||||
'telegraf',
|
||||
'firewall',
|
||||
'idstools',
|
||||
'suricata.manager',
|
||||
'zeek',
|
||||
'redis',
|
||||
'elasticsearch',
|
||||
'logstash',
|
||||
'elasticagent',
|
||||
'schedule',
|
||||
'tcpreplay',
|
||||
'docker_clean'
|
||||
@@ -105,7 +80,8 @@
|
||||
'schedule',
|
||||
'tcpreplay',
|
||||
'docker_clean',
|
||||
'elasticfleet'
|
||||
'elasticfleet',
|
||||
'elastic-fleet-package-registry'
|
||||
],
|
||||
'so-manager': [
|
||||
'salt.master',
|
||||
@@ -119,6 +95,7 @@
|
||||
'soc',
|
||||
'kratos',
|
||||
'elasticfleet',
|
||||
'elastic-fleet-package-registry',
|
||||
'firewall',
|
||||
'idstools',
|
||||
'suricata.manager',
|
||||
@@ -137,6 +114,7 @@
|
||||
'influxdb',
|
||||
'soc',
|
||||
'kratos',
|
||||
'elastic-fleet-package-registry',
|
||||
'elasticfleet',
|
||||
'firewall',
|
||||
'manager',
|
||||
@@ -166,6 +144,7 @@
|
||||
'influxdb',
|
||||
'soc',
|
||||
'kratos',
|
||||
'elastic-fleet-package-registry',
|
||||
'elasticfleet',
|
||||
'firewall',
|
||||
'idstools',
|
||||
@@ -208,31 +187,31 @@
|
||||
'schedule',
|
||||
'docker_clean'
|
||||
],
|
||||
'so-workstation': [
|
||||
'so-desktop': [
|
||||
],
|
||||
}, grain='role') %}
|
||||
|
||||
{% if (PLAYBOOK != 0) and grains.role in ['so-eval', 'so-manager', 'so-managersearch', 'so-standalone'] %}
|
||||
{% if grains.role in ['so-eval', 'so-manager', 'so-managersearch', 'so-standalone'] %}
|
||||
{% do allowed_states.append('mysql') %}
|
||||
{% endif %}
|
||||
|
||||
{%- if ZEEKVER != 'SURICATA' and grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
||||
{%- if grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
||||
{% do allowed_states.append('zeek') %}
|
||||
{%- endif %}
|
||||
|
||||
{% if STRELKA and grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
||||
{% if grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
||||
{% do allowed_states.append('strelka') %}
|
||||
{% endif %}
|
||||
|
||||
{% if ELASTICSEARCH and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-searchnode', 'so-managersearch', 'so-heavynode', 'so-import'] %}
|
||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-searchnode', 'so-managersearch', 'so-heavynode', 'so-import'] %}
|
||||
{% do allowed_states.append('elasticsearch') %}
|
||||
{% endif %}
|
||||
|
||||
{% if ELASTICSEARCH and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
||||
{% do allowed_states.append('elasticsearch.auth') %}
|
||||
{% endif %}
|
||||
|
||||
{% if KIBANA and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
||||
{% do allowed_states.append('kibana') %}
|
||||
{% do allowed_states.append('kibana.secrets') %}
|
||||
{% endif %}
|
||||
@@ -241,23 +220,19 @@
|
||||
{% do allowed_states.append('curator') %}
|
||||
{% endif %}
|
||||
|
||||
{% if ELASTALERT and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% do allowed_states.append('elastalert') %}
|
||||
{% endif %}
|
||||
|
||||
{% if (PLAYBOOK !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% do allowed_states.append('playbook') %}
|
||||
{% endif %}
|
||||
|
||||
{% if (PLAYBOOK !=0) and grains.role in ['so-eval'] %}
|
||||
{% do allowed_states.append('redis') %}
|
||||
{% endif %}
|
||||
|
||||
{% if LOGSTASH and grains.role in ['so-helixsensor', 'so-manager', 'so-standalone', 'so-searchnode', 'so-managersearch', 'so-heavynode', 'so-receiver'] %}
|
||||
{% if grains.role in ['so-manager', 'so-standalone', 'so-searchnode', 'so-managersearch', 'so-heavynode', 'so-receiver'] %}
|
||||
{% do allowed_states.append('logstash') %}
|
||||
{% endif %}
|
||||
|
||||
{% if REDIS and grains.role in ['so-manager', 'so-standalone', 'so-managersearch', 'so-heavynode', 'so-receiver'] %}
|
||||
{% if grains.role in ['so-manager', 'so-standalone', 'so-managersearch', 'so-heavynode', 'so-receiver', 'so-eval'] %}
|
||||
{% do allowed_states.append('redis') %}
|
||||
{% endif %}
|
||||
|
||||
|
||||
@@ -20,7 +20,6 @@ pki_private_key:
|
||||
- name: /etc/pki/ca.key
|
||||
- keysize: 4096
|
||||
- passphrase:
|
||||
- cipher: aes_256_cbc
|
||||
- backup: True
|
||||
{% if salt['file.file_exists']('/etc/pki/ca.key') -%}
|
||||
- prereq:
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/bin/bash
|
||||
/usr/sbin/logrotate -f /opt/so/conf/log-rotate.conf > /dev/null 2>&1
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/bin/bash
|
||||
/usr/sbin/logrotate -f /opt/so/conf/sensor-rotate.conf > /dev/null 2>&1
|
||||
@@ -1,79 +0,0 @@
|
||||
The following GUI tools are available on the analyst workstation:
|
||||
|
||||
chromium
|
||||
url: https://www.chromium.org/Home
|
||||
To run chromium, click Applications > Internet > Chromium Web Browser
|
||||
|
||||
Wireshark
|
||||
url: https://www.wireshark.org/
|
||||
To run Wireshark, click Applications > Internet > Wireshark Network Analyzer
|
||||
|
||||
NetworkMiner
|
||||
url: https://www.netresec.com
|
||||
To run NetworkMiner, click Applications > Internet > NetworkMiner
|
||||
|
||||
The following CLI tools are available on the analyst workstation:
|
||||
|
||||
bit-twist
|
||||
url: http://bittwist.sourceforge.net
|
||||
To run bit-twist, open a terminal and type: bittwist -h
|
||||
|
||||
chaosreader
|
||||
url: http://chaosreader.sourceforge.net
|
||||
To run chaosreader, open a terminal and type: chaosreader -h
|
||||
|
||||
dnsiff
|
||||
url: https://www.monkey.org/~dugsong/dsniff/
|
||||
To run dsniff, open a terminal and type: dsniff -h
|
||||
|
||||
foremost
|
||||
url: http://foremost.sourceforge.net
|
||||
To run foremost, open a terminal and type: foremost -h
|
||||
|
||||
hping3
|
||||
url: http://www.hping.org/hping3.html
|
||||
To run hping3, open a terminal and type: hping3 -h
|
||||
|
||||
netsed
|
||||
url: http://silicone.homelinux.org/projects/netsed/
|
||||
To run netsed, open a terminal and type: netsed -h
|
||||
|
||||
ngrep
|
||||
url: https://github.com/jpr5/ngrep
|
||||
To run ngrep, open a terminal and type: ngrep -h
|
||||
|
||||
scapy
|
||||
url: http://www.secdev.org/projects/scapy/
|
||||
To run scapy, open a terminal and type: scapy
|
||||
|
||||
ssldump
|
||||
url: http://www.rtfm.com/ssldump/
|
||||
To run ssldump, open a terminal and type: ssldump -h
|
||||
|
||||
sslsplit
|
||||
url: https://github.com/droe/sslsplit
|
||||
To run sslsplit, open a terminal and type: sslsplit -h
|
||||
|
||||
tcpdump
|
||||
url: http://www.tcpdump.org
|
||||
To run tcpdump, open a terminal and type: tcpdump -h
|
||||
|
||||
tcpflow
|
||||
url: https://github.com/simsong/tcpflow
|
||||
To run tcpflow, open a terminal and type: tcpflow -h
|
||||
|
||||
tcpstat
|
||||
url: https://frenchfries.net/paul/tcpstat/
|
||||
To run tcpstat, open a terminal and type: tcpstat -h
|
||||
|
||||
tcptrace
|
||||
url: http://www.tcptrace.org
|
||||
To run tcptrace, open a terminal and type: tcptrace -h
|
||||
|
||||
tcpxtract
|
||||
url: http://tcpxtract.sourceforge.net/
|
||||
To run tcpxtract, open a terminal and type: tcpxtract -h
|
||||
|
||||
whois
|
||||
url: http://www.linux.it/~md/software/
|
||||
To run whois, open a terminal and type: whois -h
|
||||
@@ -1,37 +0,0 @@
|
||||
{%- set logrotate_conf = salt['pillar.get']('logrotate:conf') %}
|
||||
{%- set group_conf = salt['pillar.get']('logrotate:group_conf') %}
|
||||
|
||||
|
||||
/opt/so/log/aptcacher-ng/*.log
|
||||
/opt/so/log/idstools/*.log
|
||||
/opt/so/log/nginx/*.log
|
||||
/opt/so/log/soc/*.log
|
||||
/opt/so/log/kratos/*.log
|
||||
/opt/so/log/kibana/*.log
|
||||
/opt/so/log/influxdb/*.log
|
||||
/opt/so/log/elastalert/*.log
|
||||
/opt/so/log/soctopus/*.log
|
||||
/opt/so/log/curator/*.log
|
||||
/opt/so/log/fleet/*.log
|
||||
/opt/so/log/suricata/*.log
|
||||
/opt/so/log/mysql/*.log
|
||||
/opt/so/log/telegraf/*.log
|
||||
/opt/so/log/redis/*.log
|
||||
/opt/so/log/sensoroni/*.log
|
||||
/opt/so/log/stenographer/*.log
|
||||
/opt/so/log/salt/so-salt-minion-check
|
||||
/opt/so/log/salt/minion
|
||||
/opt/so/log/salt/master
|
||||
/opt/so/log/logscan/*.log
|
||||
/nsm/idh/*.log
|
||||
{
|
||||
{{ logrotate_conf | indent(width=4) }}
|
||||
}
|
||||
|
||||
# Playbook's log directory needs additional configuration
|
||||
# because Playbook requires a more permissive directory
|
||||
/opt/so/log/playbook/*.log
|
||||
{
|
||||
{{ logrotate_conf | indent(width=4) }}
|
||||
{{ group_conf | indent(width=4) }}
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
/opt/so/log/sensor_clean.log
|
||||
{
|
||||
daily
|
||||
rotate 2
|
||||
missingok
|
||||
nocompress
|
||||
create
|
||||
sharedscripts
|
||||
}
|
||||
|
||||
/nsm/strelka/log/strelka.log
|
||||
{
|
||||
daily
|
||||
rotate 14
|
||||
missingok
|
||||
copytruncate
|
||||
compress
|
||||
create
|
||||
extension .log
|
||||
dateext
|
||||
dateyesterday
|
||||
}
|
||||
@@ -10,6 +10,10 @@ include:
|
||||
- manager.elasticsearch # needed for elastic_curl_config state
|
||||
{% endif %}
|
||||
|
||||
net.core.wmem_default:
|
||||
sysctl.present:
|
||||
- value: 26214400
|
||||
|
||||
# Remove variables.txt from /tmp - This is temp
|
||||
rmvariablesfile:
|
||||
file.absent:
|
||||
@@ -49,13 +53,12 @@ so-status.conf:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- unless: ls /opt/so/conf/so-status/so-status.conf
|
||||
|
||||
sosaltstackperms:
|
||||
socore_opso_perms:
|
||||
file.directory:
|
||||
- name: /opt/so/saltstack
|
||||
- name: /opt/so
|
||||
- user: 939
|
||||
- group: 939
|
||||
- dir_mode: 770
|
||||
|
||||
|
||||
so_log_perms:
|
||||
file.directory:
|
||||
- name: /opt/so/log
|
||||
@@ -112,21 +115,23 @@ elastic_curl_config:
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
# Sync some Utilities
|
||||
utilsyncscripts:
|
||||
|
||||
common_sbin:
|
||||
file.recurse:
|
||||
- name: /usr/sbin
|
||||
- user: root
|
||||
- group: root
|
||||
- source: salt://common/tools/sbin
|
||||
- user: 939
|
||||
- group: 939
|
||||
- file_mode: 755
|
||||
|
||||
common_sbin_jinja:
|
||||
file.recurse:
|
||||
- name: /usr/sbin
|
||||
- source: salt://common/tools/sbin_jinja
|
||||
- user: 939
|
||||
- group: 939
|
||||
- file_mode: 755
|
||||
- template: jinja
|
||||
- source: salt://common/tools/sbin
|
||||
- exclude_pat:
|
||||
- so-common
|
||||
- so-firewall
|
||||
- so-image-common
|
||||
- soup
|
||||
- so-status
|
||||
|
||||
so-status_script:
|
||||
file.managed:
|
||||
@@ -146,56 +151,8 @@ so-sensor-clean:
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
sensorrotatescript:
|
||||
file.managed:
|
||||
- name: /usr/local/bin/sensor-rotate
|
||||
- source: salt://common/cron/sensor-rotate
|
||||
- mode: 755
|
||||
|
||||
sensorrotateconf:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/sensor-rotate.conf
|
||||
- source: salt://common/files/sensor-rotate.conf
|
||||
- mode: 644
|
||||
|
||||
sensor-rotate:
|
||||
cron.present:
|
||||
- name: /usr/local/bin/sensor-rotate
|
||||
- identifier: sensor-rotate
|
||||
- user: root
|
||||
- minute: '1'
|
||||
- hour: '0'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
{% endif %}
|
||||
|
||||
commonlogrotatescript:
|
||||
file.managed:
|
||||
- name: /usr/local/bin/common-rotate
|
||||
- source: salt://common/cron/common-rotate
|
||||
- mode: 755
|
||||
|
||||
commonlogrotateconf:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/log-rotate.conf
|
||||
- source: salt://common/files/log-rotate.conf
|
||||
- template: jinja
|
||||
- mode: 644
|
||||
|
||||
common-rotate:
|
||||
cron.present:
|
||||
- name: /usr/local/bin/common-rotate
|
||||
- identifier: common-rotate
|
||||
- user: root
|
||||
- minute: '1'
|
||||
- hour: '0'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
# Create the status directory
|
||||
sostatusdir:
|
||||
file.directory:
|
||||
|
||||
@@ -8,6 +8,15 @@ soup_scripts:
|
||||
- source: salt://common/tools/sbin
|
||||
- include_pat:
|
||||
- so-common
|
||||
- so-firewall
|
||||
- so-image-common
|
||||
- soup
|
||||
|
||||
soup_manager_scripts:
|
||||
file.recurse:
|
||||
- name: /usr/sbin
|
||||
- user: root
|
||||
- group: root
|
||||
- file_mode: 755
|
||||
- source: salt://manager/tools/sbin
|
||||
- include_pat:
|
||||
- so-firewall
|
||||
- soup
|
||||
@@ -5,6 +5,7 @@
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
ELASTIC_AGENT_TARBALL_VERSION="8.7.1"
|
||||
DEFAULT_SALT_DIR=/opt/so/saltstack/default
|
||||
DOC_BASE_URL="https://docs.securityonion.net/en/2.4"
|
||||
|
||||
@@ -160,39 +161,6 @@ disable_fastestmirror() {
|
||||
sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
|
||||
}
|
||||
|
||||
elastic_fleet_integration_create() {
|
||||
|
||||
JSON_STRING=$1
|
||||
|
||||
curl -K /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/package_policies" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING"
|
||||
}
|
||||
|
||||
elastic_fleet_policy_create() {
|
||||
|
||||
NAME=$1
|
||||
DESC=$2
|
||||
FLEETSERVER=$3
|
||||
|
||||
JSON_STRING=$( jq -n \
|
||||
--arg NAME "$NAME" \
|
||||
--arg DESC "$DESC" \
|
||||
--arg FLEETSERVER "$FLEETSERVER" \
|
||||
'{"name": $NAME,"id":$NAME,"description":$DESC,"namespace":"default","monitoring_enabled":["logs"],"inactivity_timeout":1209600,"has_fleet_server":$FLEETSERVER}'
|
||||
)
|
||||
# Create Fleet Policy
|
||||
curl -K /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/agent_policies" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING"
|
||||
|
||||
}
|
||||
|
||||
elastic_fleet_policy_update() {
|
||||
|
||||
POLICYID=$1
|
||||
JSON_STRING=$2
|
||||
|
||||
curl -K /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/agent_policies/$POLICYID" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING"
|
||||
}
|
||||
|
||||
|
||||
elastic_license() {
|
||||
|
||||
read -r -d '' message <<- EOM
|
||||
@@ -238,7 +206,7 @@ gpg_rpm_import() {
|
||||
local RPMKEYSLOC="$UPDATE_DIR/salt/repo/client/files/rocky/keys"
|
||||
fi
|
||||
|
||||
RPMKEYS=('RPM-GPG-KEY-EPEL-9' 'SALT-PROJECT-GPG-PUBKEY-2023.pub' 'docker.pub' 'securityonion.pub')
|
||||
RPMKEYS=('RPM-GPG-KEY-rockyofficial' 'RPM-GPG-KEY-EPEL-9' 'SALT-PROJECT-GPG-PUBKEY-2023.pub' 'docker.pub' 'securityonion.pub')
|
||||
|
||||
for RPMKEY in "${RPMKEYS[@]}"; do
|
||||
rpm --import $RPMKEYSLOC/$RPMKEY
|
||||
@@ -275,7 +243,7 @@ is_manager_node() {
|
||||
is_sensor_node() {
|
||||
# Check to see if this is a sensor (forward) node
|
||||
is_single_node_grid && return 0
|
||||
grep "role: so-" /etc/salt/grains | grep -E "sensor|heavynode|helix" &> /dev/null
|
||||
grep "role: so-" /etc/salt/grains | grep -E "sensor|heavynode" &> /dev/null
|
||||
}
|
||||
|
||||
is_single_node_grid() {
|
||||
@@ -333,6 +301,17 @@ lookup_role() {
|
||||
echo ${pieces[1]}
|
||||
}
|
||||
|
||||
is_feature_enabled() {
|
||||
feature=$1
|
||||
enabled=$(lookup_salt_value features)
|
||||
for cur in $enabled; do
|
||||
if [[ "$feature" == "$cur" ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
require_manager() {
|
||||
if is_manager_node; then
|
||||
echo "This is a manager, so we can proceed."
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
|
||||
# this file except in compliance with the Elastic License 2.0.
|
||||
|
||||
#so-elastic-agent-gen-installers $FleetHost $EnrollmentToken
|
||||
|
||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
ENROLLMENTOKEN=$(curl -K /opt/so/conf/elasticsearch/curl.config -L "localhost:5601/api/fleet/enrollment_api_keys" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' | jq .list | jq -r -c '.[] | select(.policy_id | contains("endpoints")) | .api_key')
|
||||
|
||||
#FLEETHOST=$(lookup_pillar "server:url" "elasticfleet")
|
||||
FLEETHOST="{{ GLOBALS.manager_ip }}"
|
||||
|
||||
#FLEETHOST=$1
|
||||
#ENROLLMENTOKEN=$2
|
||||
CONTAINERGOOS=( "linux" "darwin" "windows" )
|
||||
|
||||
#rm -rf /tmp/elastic-agent-workspace
|
||||
#mkdir -p /tmp/elastic-agent-workspace
|
||||
|
||||
for OS in "${CONTAINERGOOS[@]}"
|
||||
do
|
||||
printf "\n\nGenerating $OS Installer..."
|
||||
#cp /opt/so/saltstack/default/salt/elasticfleet/files/elastic-agent/so-elastic-agent-*-$OS-x86_64.tar.gz /tmp/elastic-agent-workspace/$OS.tar.gz
|
||||
docker run -e CGO_ENABLED=0 -e GOOS=$OS \
|
||||
--mount type=bind,source=/etc/ssl/certs/,target=/workspace/files/cert/ \
|
||||
--mount type=bind,source=/opt/so/saltstack/local/salt/elasticfleet/files/so_agent-installers/,target=/output/ \
|
||||
{{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elastic-agent-builder:{{ GLOBALS.so_version }} go build -ldflags "-X main.fleetHost=$FLEETHOST -X main.enrollmentToken=$ENROLLMENTOKEN" -o /output/so-elastic-agent_$OS
|
||||
printf "\n $OS Installer Generated..."
|
||||
done
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
# Initial Endpoints
|
||||
for INTEGRATION in /opt/so/saltstack/default/salt/elasticfleet/files/integrations/endpoints-initial/*.json
|
||||
do
|
||||
printf "\n\nInitial Endpoint Policy - Loading $INTEGRATION\n"
|
||||
elastic_fleet_integration_create "@$INTEGRATION"
|
||||
done
|
||||
|
||||
# Grid Nodes
|
||||
for INTEGRATION in /opt/so/saltstack/default/salt/elasticfleet/files/integrations/grid-nodes/*.json
|
||||
do
|
||||
printf "\n\nGrid Nodes Policy - Loading $INTEGRATION\n"
|
||||
elastic_fleet_integration_create "@$INTEGRATION"
|
||||
done
|
||||
@@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
|
||||
{%- if grains['role'] in ['so-eval','so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode', 'so-import']%}
|
||||
/usr/sbin/so-restart elasticsearch $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-eval', 'so-manager', 'so-managersearch', 'so-standalone', 'so-import']%}
|
||||
/usr/sbin/so-restart kibana $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode']%}
|
||||
/usr/sbin/so-restart logstash $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode']%}
|
||||
/usr/sbin/so-restart curator $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-eval','so-manager', 'so-managersearch', 'so-standalone']%}
|
||||
/usr/sbin/so-restart elastalert $1
|
||||
{%- endif %}
|
||||
@@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
|
||||
{%- if grains['role'] in ['so-eval','so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode', 'so-import']%}
|
||||
/usr/sbin/so-start elasticsearch $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-eval', 'so-manager', 'so-managersearch', 'so-standalone', 'so-import']%}
|
||||
/usr/sbin/so-start kibana $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode']%}
|
||||
/usr/sbin/so-start logstash $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode']%}
|
||||
/usr/sbin/so-start curator $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-eval','so-manager', 'so-managersearch', 'so-standalone']%}
|
||||
/usr/sbin/so-start elastalert $1
|
||||
{%- endif %}
|
||||
@@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
|
||||
{%- if grains['role'] in ['so-eval','so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode', 'so-import']%}
|
||||
/usr/sbin/so-stop elasticsearch $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-eval', 'so-manager', 'so-managersearch', 'so-standalone', 'so-import']%}
|
||||
/usr/sbin/so-stop kibana $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode']%}
|
||||
/usr/sbin/so-stop logstash $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone', 'so-heavynode', 'so-searchnode']%}
|
||||
/usr/sbin/so-stop curator $1
|
||||
{%- endif %}
|
||||
|
||||
{%- if grains['role'] in ['so-eval','so-manager', 'so-managersearch', 'so-standalone']%}
|
||||
/usr/sbin/so-stop elastalert $1
|
||||
{%- endif %}
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
{% import_yaml 'elasticsearch/defaults.yaml' as ESCONFIG with context %}
|
||||
{%- set ES_INDEX_SETTINGS = salt['pillar.get']('elasticsearch:index_settings', default=ESCONFIG.elasticsearch.index_settings, merge=True) %}
|
||||
{%- set NODEIP = salt['pillar.get']('host:mainip', '') %}
|
||||
|
||||
{%- for index, settings in ES_INDEX_SETTINGS.items() %}
|
||||
{%- if settings.policy is defined %}
|
||||
echo
|
||||
echo "Setting up {{ index }}-logs policy..."
|
||||
curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -s -k -L -X PUT "https://{{ NODEIP }}:9200/_ilm/policy/{{ index }}-logs" -H 'Content-Type: application/json' -d'{ "policy": {{ settings.policy | tojson(true) }} }'
|
||||
echo
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
echo
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
#
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
|
||||
IP={{ salt['grains.get']('ip_interfaces').get(salt['pillar.get']('sensor:mainint', salt['pillar.get']('manager:mainint', salt['pillar.get']('elasticsearch:mainint', salt['pillar.get']('host:mainint')))))[0] }}
|
||||
ESPORT=9200
|
||||
|
||||
echo "Removing read only attributes for indices..."
|
||||
echo
|
||||
curl -K /opt/so/conf/elasticsearch/curl.config -s -k -XPUT -H "Content-Type: application/json" -L https://$IP:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "Index settings updated..."; else echo "There was any issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;
|
||||
@@ -1,104 +0,0 @@
|
||||
#!/usr/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
if [[ $# -lt 1 ]]; then
|
||||
echo "Usage: $0 --role=<ROLE> --ip=<IP ADDRESS> --apply=<true|false>"
|
||||
echo ""
|
||||
echo " Example: so-firewall --role=sensor --ip=192.168.254.100 --apply=true"
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
|
||||
for i in "$@"; do
|
||||
case $i in
|
||||
-r=*|--role=*)
|
||||
ROLE="${i#*=}"
|
||||
shift
|
||||
;;
|
||||
-i=*|--ip=*)
|
||||
IP="${i#*=}"
|
||||
shift
|
||||
;;
|
||||
-a=*|--apply*)
|
||||
APPLY="${i#*=}"
|
||||
shift
|
||||
;;
|
||||
-*|--*)
|
||||
echo "Unknown option $i"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
ROLE=${ROLE,,}
|
||||
APPLY=${APPLY,,}
|
||||
|
||||
function rolecall() {
|
||||
THEROLE=$1
|
||||
THEROLES="analyst analyst_workstations beats_endpoint beats_endpoint_ssl elastic_agent_endpoint elasticsearch_rest endgame eval fleet heavynodes idh manager managersearch receivers searchnodes sensors standalone strelka_frontend syslog"
|
||||
|
||||
for AROLE in $THEROLES; do
|
||||
if [ "$AROLE" = "$THEROLE" ]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# Make sure the required options are specified
|
||||
if [ -z "$ROLE" ]; then
|
||||
echo "Please specify a role with --role="
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$IP" ]; then
|
||||
echo "Please specify an IP address with --ip="
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Are we dealing with a role that this script supports?
|
||||
if rolecall "$ROLE"; then
|
||||
echo "$ROLE is a supported role"
|
||||
else
|
||||
echo "This is not a supported role"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Are we dealing with an IP?
|
||||
if verify_ip4 "$IP"; then
|
||||
echo "$IP is a valid IP or CIDR"
|
||||
else
|
||||
echo "$IP is not a valid IP or CIDR"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local_salt_dir=/opt/so/saltstack/local/salt/firewall
|
||||
|
||||
# Let's see if the file exists and if it does, let's see if the IP exists.
|
||||
if [ -f "$local_salt_dir/hostgroups/$ROLE" ]; then
|
||||
if grep -q $IP "$local_salt_dir/hostgroups/$ROLE"; then
|
||||
echo "Host already exists"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# If you have reached this part of your quest then let's add the IP
|
||||
echo "Adding $IP to the $ROLE role"
|
||||
echo "$IP" >> $local_salt_dir/hostgroups/$ROLE
|
||||
|
||||
# Check to see if we are applying this right away.
|
||||
if [ "$APPLY" = "true" ]; then
|
||||
echo "Applying the firewall rules"
|
||||
salt-call state.apply firewall queue=True
|
||||
echo "Firewall rules have been applied... Review logs further if there were errors."
|
||||
echo ""
|
||||
else
|
||||
echo "Firewall rules will be applied next salt run"
|
||||
fi
|
||||
@@ -1,27 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
local_salt_dir=/opt/so/saltstack/local
|
||||
|
||||
got_root() {
|
||||
|
||||
# Make sure you are root
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo "This script must be run using sudo!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
got_root
|
||||
if [ ! -f $local_salt_dir/pillar/fireeye/init.sls ]; then
|
||||
echo "This is nto configured for Helix Mode. Please re-install."
|
||||
exit
|
||||
else
|
||||
echo "Enter your Helix API Key: "
|
||||
read APIKEY
|
||||
sed -i "s/^ api_key.*/ api_key: $APIKEY/g" $local_salt_dir/pillar/fireeye/init.sls
|
||||
docker stop so-logstash
|
||||
docker rm so-logstash
|
||||
echo "Restarting Logstash for updated key"
|
||||
salt-call state.apply logstash queue=True
|
||||
fi
|
||||
@@ -38,6 +38,7 @@ container_list() {
|
||||
"so-zeek"
|
||||
"so-elastic-agent"
|
||||
"so-elastic-agent-builder"
|
||||
"so-elastic-fleet-package-registry"
|
||||
)
|
||||
elif [ $MANAGERCHECK != 'so-helix' ]; then
|
||||
TRUSTED_CONTAINERS=(
|
||||
@@ -45,6 +46,7 @@ container_list() {
|
||||
"so-elastalert"
|
||||
"so-elastic-agent"
|
||||
"so-elastic-agent-builder"
|
||||
"so-elastic-fleet-package-registry"
|
||||
"so-elasticsearch"
|
||||
"so-idh"
|
||||
"so-idstools"
|
||||
|
||||
@@ -1,155 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{%- set MANAGER = salt['grains.get']('master') %}
|
||||
{%- set VERSION = salt['pillar.get']('global:soversion') %}
|
||||
{%- set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
|
||||
{%- set MANAGERIP = salt['pillar.get']('global:managerip') %}
|
||||
{%- set URLBASE = salt['pillar.get']('global:url_base') %}
|
||||
{%- set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
|
||||
{%- set ES_PASS = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %}
|
||||
|
||||
INDEX_DATE=$(date +'%Y.%m.%d')
|
||||
RUNID=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 8 | head -n 1)
|
||||
LOG_FILE=/nsm/import/evtx-import.log
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
function usage {
|
||||
cat << EOF
|
||||
Usage: $0 <evtx-file-1> [evtx-file-2] [evtx-file-*]
|
||||
|
||||
Imports one or more evtx files into Security Onion. The evtx files will be analyzed and made available for review in the Security Onion toolset.
|
||||
EOF
|
||||
}
|
||||
|
||||
|
||||
function evtx2es() {
|
||||
EVTX=$1
|
||||
HASH=$2
|
||||
|
||||
docker run --rm \
|
||||
-v "$EVTX:/tmp/data.evtx" \
|
||||
-v "/nsm/import/$HASH/evtx/:/tmp/evtx/" \
|
||||
-v "/nsm/import/evtx-end_newest:/tmp/newest" \
|
||||
-v "/nsm/import/evtx-start_oldest:/tmp/oldest" \
|
||||
--entrypoint "/evtx_calc_timestamps.sh" \
|
||||
{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} >> $LOG_FILE 2>&1
|
||||
}
|
||||
|
||||
# if no parameters supplied, display usage
|
||||
if [ $# -eq 0 ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# ensure this is a Manager node
|
||||
require_manager
|
||||
|
||||
# verify that all parameters are files
|
||||
for i in "$@"; do
|
||||
if ! [ -f "$i" ]; then
|
||||
usage
|
||||
echo "\"$i\" is not a valid file!"
|
||||
exit 2
|
||||
fi
|
||||
done
|
||||
|
||||
# track if we have any valid or invalid evtx
|
||||
INVALID_EVTXS="no"
|
||||
VALID_EVTXS="no"
|
||||
|
||||
# track oldest start and newest end so that we can generate the Kibana search hyperlink at the end
|
||||
START_OLDEST="2050-12-31"
|
||||
END_NEWEST="1971-01-01"
|
||||
|
||||
touch /nsm/import/evtx-start_oldest
|
||||
touch /nsm/import/evtx-end_newest
|
||||
|
||||
echo $START_OLDEST > /nsm/import/evtx-start_oldest
|
||||
echo $END_NEWEST > /nsm/import/evtx-end_newest
|
||||
|
||||
# paths must be quoted in case they include spaces
|
||||
for EVTX in "$@"; do
|
||||
EVTX=$(/usr/bin/realpath "$EVTX")
|
||||
echo "Processing Import: ${EVTX}"
|
||||
|
||||
# generate a unique hash to assist with dedupe checks
|
||||
HASH=$(md5sum "${EVTX}" | awk '{ print $1 }')
|
||||
HASH_DIR=/nsm/import/${HASH}
|
||||
echo "- assigning unique identifier to import: $HASH"
|
||||
|
||||
if [ -d $HASH_DIR ]; then
|
||||
echo "- this EVTX has already been imported; skipping"
|
||||
INVALID_EVTXS="yes"
|
||||
else
|
||||
VALID_EVTXS="yes"
|
||||
|
||||
EVTX_DIR=$HASH_DIR/evtx
|
||||
mkdir -p $EVTX_DIR
|
||||
|
||||
# import evtx and write them to import ingest pipeline
|
||||
echo "- importing logs to Elasticsearch..."
|
||||
evtx2es "${EVTX}" $HASH
|
||||
|
||||
# compare $START to $START_OLDEST
|
||||
START=$(cat /nsm/import/evtx-start_oldest)
|
||||
START_COMPARE=$(date -d $START +%s)
|
||||
START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
|
||||
if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
|
||||
START_OLDEST=$START
|
||||
fi
|
||||
|
||||
# compare $ENDNEXT to $END_NEWEST
|
||||
END=$(cat /nsm/import/evtx-end_newest)
|
||||
ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"`
|
||||
ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
|
||||
END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)
|
||||
if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
|
||||
END_NEWEST=$ENDNEXT
|
||||
fi
|
||||
|
||||
cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
|
||||
chmod 644 "${EVTX_DIR}"/data.evtx
|
||||
|
||||
fi # end of valid evtx
|
||||
|
||||
echo
|
||||
|
||||
done # end of for-loop processing evtx files
|
||||
|
||||
# remove temp files
|
||||
echo "Cleaning up:"
|
||||
for TEMP_EVTX in ${TEMP_EVTXS[@]}; do
|
||||
echo "- removing temporary evtx $TEMP_EVTX"
|
||||
rm -f $TEMP_EVTX
|
||||
done
|
||||
|
||||
# output final messages
|
||||
if [ "$INVALID_EVTXS" = "yes" ]; then
|
||||
echo
|
||||
echo "Please note! One or more evtx was invalid! You can scroll up to see which ones were invalid."
|
||||
fi
|
||||
|
||||
START_OLDEST_FORMATTED=`date +%Y-%m-%d --date="$START_OLDEST"`
|
||||
START_OLDEST_SLASH=$(echo $START_OLDEST_FORMATTED | sed -e 's/-/%2F/g')
|
||||
END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g')
|
||||
|
||||
if [ "$VALID_EVTXS" = "yes" ]; then
|
||||
cat << EOF
|
||||
|
||||
Import complete!
|
||||
|
||||
You can use the following hyperlink to view data in the time range of your import. You can triple-click to quickly highlight the entire hyperlink and you can then copy it into your browser:
|
||||
https://{{ URLBASE }}/#/dashboards?q=import.id:${RUNID}%20%7C%20groupby%20-sankey%20event.dataset%20event.category%2a%20%7C%20groupby%20-pie%20event.category%20%7C%20groupby%20-bar%20event.module%20%7C%20groupby%20event.dataset%20%7C%20groupby%20event.module%20%7C%20groupby%20event.category%20%7C%20groupby%20observer.name%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC
|
||||
|
||||
or you can manually set your Time Range to be (in UTC):
|
||||
From: $START_OLDEST_FORMATTED To: $END_NEWEST
|
||||
|
||||
Please note that it may take 30 seconds or more for events to appear in Security Onion Console.
|
||||
EOF
|
||||
fi
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{% set MAININT = salt['pillar.get']('host:mainint') -%}
|
||||
{% set NODEIP = salt['grains.get']('ip_interfaces').get(MAININT)[0] -%}
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
if [ "$1" == "" ]; then
|
||||
for i in $(curl -s -L http://{{ NODEIP }}:9600/_node/stats | jq .pipelines | jq '. | to_entries | .[].key' | sed 's/\"//g'); do echo ${i^}:; curl -s localhost:9600/_node/stats | jq .pipelines.$i.events; done
|
||||
else
|
||||
curl -s -L http://{{ NODEIP }}:9600/_node/stats | jq .pipelines.$1.events
|
||||
fi
|
||||
@@ -24,6 +24,7 @@ if [ $# -ge 1 ]; then
|
||||
|
||||
case $1 in
|
||||
"steno") docker stop so-steno && docker rm so-steno && salt-call state.apply pcap queue=True;;
|
||||
"elastic-fleet") docker stop so-elastic-fleet && docker rm so-elastic-fleet && salt-call state.apply elasticfleet queue=True;;
|
||||
*) docker stop so-$1 ; docker rm so-$1 ; salt-call state.apply $1 queue=True;;
|
||||
esac
|
||||
else
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
#!/bin/bash
# Run idstools-rulecat inside the so-idstools container, forwarding all
# command-line arguments (each wrapped in quotes to survive the shell -c).

. /usr/sbin/so-common

# Build a single string of double-quoted arguments for the container shell.
quoted_args=""
for a in "$@"; do
  quoted_args="${quoted_args} \"${a}\""
done

docker exec so-idstools /bin/bash -c "cd /opt/so/idstools/etc && idstools-rulecat --force ${quoted_args}"
|
||||
@@ -24,6 +24,7 @@ if [ $# -ge 1 ]; then
|
||||
case $1 in
|
||||
"all") salt-call state.highstate queue=True;;
|
||||
"steno") if docker ps | grep -q so-$1; then printf "\n$1 is already running!\n\n"; else docker rm so-$1 >/dev/null 2>&1 ; salt-call state.apply pcap queue=True; fi ;;
|
||||
"elastic-fleet") if docker ps | grep -q so-$1; then printf "\n$1 is already running!\n\n"; else docker rm so-$1 >/dev/null 2>&1 ; salt-call state.apply elasticfleet queue=True; fi ;;
|
||||
*) if docker ps | grep -E -q '^so-$1$'; then printf "\n$1 is already running\n\n"; else docker rm so-$1 >/dev/null 2>&1 ; salt-call state.apply $1 queue=True; fi ;;
|
||||
esac
|
||||
else
|
||||
|
||||
@@ -6,17 +6,17 @@
|
||||
# Elastic License 2.0.
|
||||
|
||||
|
||||
{# we only want the script to install the workstation if it is Rocky -#}
|
||||
{# we only want the script to install the desktop if it is Rocky -#}
|
||||
{% if grains.os == 'Rocky' -%}
|
||||
{# if this is a manager -#}
|
||||
{% if grains.master == grains.id.split('_')|first -%}
|
||||
|
||||
source /usr/sbin/so-common
|
||||
doc_workstation_url="$DOC_BASE_URL/analyst-vm.html"
|
||||
doc_desktop_url="$DOC_BASE_URL/desktop.html"
|
||||
pillar_file="/opt/so/saltstack/local/pillar/minions/{{grains.id}}.sls"
|
||||
|
||||
if [ -f "$pillar_file" ]; then
|
||||
if ! grep -q "^workstation:$" "$pillar_file"; then
|
||||
if ! grep -q "^desktop:$" "$pillar_file"; then
|
||||
|
||||
FIRSTPASS=yes
|
||||
while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do
|
||||
@@ -26,7 +26,7 @@ if [ -f "$pillar_file" ]; then
|
||||
echo "## _______________________________ ##"
|
||||
echo "## ##"
|
||||
echo "## Installing the Security Onion ##"
|
||||
echo "## analyst node on this device will ##"
|
||||
echo "## Desktop on this device will ##"
|
||||
echo "## make permanent changes to ##"
|
||||
echo "## the system. ##"
|
||||
echo "## A system reboot will be required ##"
|
||||
@@ -42,40 +42,40 @@ if [ -f "$pillar_file" ]; then
|
||||
done
|
||||
|
||||
if [[ $INSTALL == "no" ]]; then
|
||||
echo "Exiting analyst node installation."
|
||||
echo "Exiting desktop node installation."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Add workstation pillar to the minion's pillar file
|
||||
# Add desktop pillar to the minion's pillar file
|
||||
printf '%s\n'\
|
||||
"workstation:"\
|
||||
"desktop:"\
|
||||
" gui:"\
|
||||
" enabled: true"\
|
||||
"" >> "$pillar_file"
|
||||
echo "Applying the workstation state. This could take some time since there are many packages that need to be installed."
|
||||
if salt-call state.apply workstation -linfo queue=True; then # make sure the state ran successfully
|
||||
echo "Applying the desktop state. This could take some time since there are many packages that need to be installed."
|
||||
if salt-call state.apply desktop -linfo queue=True; then # make sure the state ran successfully
|
||||
echo ""
|
||||
echo "Analyst workstation has been installed!"
|
||||
echo "Security Onion Desktop has been installed!"
|
||||
echo "Press ENTER to reboot or Ctrl-C to cancel."
|
||||
read pause
|
||||
|
||||
reboot;
|
||||
else
|
||||
echo "There was an issue applying the workstation state. Please review the log above or at /opt/so/log/salt/minion."
|
||||
echo "There was an issue applying the desktop state. Please review the log above or at /opt/so/log/salt/minion."
|
||||
fi
|
||||
else # workstation is already added
|
||||
echo "The workstation pillar already exists in $pillar_file."
|
||||
echo "To enable/disable the gui, set 'workstation:gui:enabled' to true or false in $pillar_file."
|
||||
echo "Additional documentation can be found at $doc_workstation_url."
|
||||
else # desktop is already added
|
||||
echo "The desktop pillar already exists in $pillar_file."
|
||||
echo "To enable/disable the gui, set 'desktop:gui:enabled' to true or false in $pillar_file."
|
||||
echo "Additional documentation can be found at $doc_desktop_url."
|
||||
fi
|
||||
else # if the pillar file doesn't exist
|
||||
echo "Could not find $pillar_file and add the workstation pillar."
|
||||
echo "Could not find $pillar_file and add the desktop pillar."
|
||||
fi
|
||||
|
||||
{#- if this is not a manager #}
|
||||
{% else -%}
|
||||
|
||||
echo "Since this is not a manager, the pillar values to enable analyst workstation must be set manually. Please view the documentation at $doc_workstation_url."
|
||||
echo "Since this is not a manager, the pillar values to enable Security Onion Desktop must be set manually. Please view the documentation at $doc_desktop_url."
|
||||
|
||||
{#- endif if this is a manager #}
|
||||
{% endif -%}
|
||||
@@ -83,7 +83,7 @@ echo "Since this is not a manager, the pillar values to enable analyst workstati
|
||||
{#- if not Rocky #}
|
||||
{%- else %}
|
||||
|
||||
echo "The Analyst Workstation can only be installed on Rocky. Please view the documentation at $doc_workstation_url."
|
||||
echo "The Security Onion Desktop can only be installed on Rocky Linux. Please view the documentation at $doc_desktop_url."
|
||||
|
||||
{#- endif grains.os == Rocky #}
|
||||
{% endif -%}
|
||||
225
salt/common/tools/sbin_jinja/so-import-evtx
Executable file
@@ -0,0 +1,225 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{%- set MANAGER = salt['grains.get']('master') %}
|
||||
{%- set VERSION = salt['pillar.get']('global:soversion') %}
|
||||
{%- set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
|
||||
{%- set MANAGERIP = salt['pillar.get']('global:managerip') %}
|
||||
{%- set URLBASE = salt['pillar.get']('global:url_base') %}
|
||||
{%- set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
|
||||
{%- set ES_PASS = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %}
|
||||
|
||||
INDEX_DATE=$(date +'%Y.%m.%d')
|
||||
LOG_FILE=/nsm/import/evtx-import.log
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
function usage {
|
||||
cat << EOF
|
||||
Usage: $0 [options] <evtx-file-1> [evtx-file-2] [evtx-file-*]
|
||||
|
||||
Imports one or more evtx files into Security Onion. The evtx files will be analyzed and made available for review in the Security Onion toolset.
|
||||
|
||||
Options:
|
||||
--json Outputs summary in JSON format. Implies --quiet.
|
||||
--quiet Silences progress information to stdout.
|
||||
EOF
|
||||
}
|
||||
|
||||
quiet=0
|
||||
json=0
|
||||
INPUT_FILES=
|
||||
while [[ $# -gt 0 ]]; do
|
||||
param=$1
|
||||
shift
|
||||
case "$param" in
|
||||
--json)
|
||||
json=1
|
||||
quiet=1
|
||||
;;
|
||||
--quiet)
|
||||
quiet=1
|
||||
;;
|
||||
-*)
|
||||
echo "Encountered unexpected parameter: $param"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
if [[ "$INPUT_FILES" != "" ]]; then
|
||||
INPUT_FILES="$INPUT_FILES $param"
|
||||
else
|
||||
INPUT_FILES="$param"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
function status {
|
||||
msg=$1
|
||||
[[ $quiet -eq 1 ]] && return
|
||||
echo "$msg"
|
||||
}
|
||||
|
||||
function evtx2es() {
|
||||
EVTX=$1
|
||||
HASH=$2
|
||||
|
||||
docker run --rm \
|
||||
-v "$EVTX:/tmp/data.evtx" \
|
||||
-v "/nsm/import/$HASH/evtx/:/tmp/evtx/" \
|
||||
-v "/nsm/import/evtx-end_newest:/tmp/newest" \
|
||||
-v "/nsm/import/evtx-start_oldest:/tmp/oldest" \
|
||||
--entrypoint "/evtx_calc_timestamps.sh" \
|
||||
{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} >> $LOG_FILE 2>&1
|
||||
}
|
||||
|
||||
# if no parameters supplied, display usage
|
||||
if [ "$INPUT_FILES" == "" ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# ensure this is a Manager node
|
||||
require_manager @> /dev/null
|
||||
|
||||
# verify that all parameters are files
|
||||
for i in $INPUT_FILES; do
|
||||
if ! [ -f "$i" ]; then
|
||||
echo "\"$i\" is not a valid file!"
|
||||
exit 2
|
||||
fi
|
||||
done
|
||||
|
||||
# track oldest start and newest end so that we can generate the Kibana search hyperlink at the end
|
||||
START_OLDEST="2050-12-31"
|
||||
END_NEWEST="1971-01-01"
|
||||
|
||||
INVALID_EVTXS_COUNT=0
|
||||
VALID_EVTXS_COUNT=0
|
||||
SKIPPED_EVTXS_COUNT=0
|
||||
|
||||
touch /nsm/import/evtx-start_oldest
|
||||
touch /nsm/import/evtx-end_newest
|
||||
|
||||
echo $START_OLDEST > /nsm/import/evtx-start_oldest
|
||||
echo $END_NEWEST > /nsm/import/evtx-end_newest
|
||||
|
||||
# paths must be quoted in case they include spaces
|
||||
for EVTX in $INPUT_FILES; do
|
||||
EVTX=$(/usr/bin/realpath "$EVTX")
|
||||
status "Processing Import: ${EVTX}"
|
||||
|
||||
# generate a unique hash to assist with dedupe checks
|
||||
HASH=$(md5sum "${EVTX}" | awk '{ print $1 }')
|
||||
HASH_DIR=/nsm/import/${HASH}
|
||||
status "- assigning unique identifier to import: $HASH"
|
||||
|
||||
if [[ "$HASH_FILTERS" == "" ]]; then
|
||||
HASH_FILTERS="import.id:${HASH}"
|
||||
HASHES="${HASH}"
|
||||
else
|
||||
HASH_FILTERS="$HASH_FILTERS%20OR%20import.id:${HASH}"
|
||||
HASHES="${HASHES} ${HASH}"
|
||||
fi
|
||||
|
||||
if [ -d $HASH_DIR ]; then
|
||||
status "- this EVTX has already been imported; skipping"
|
||||
SKIPPED_EVTXS_COUNT=$((SKIPPED_EVTXS_COUNT + 1))
|
||||
else
|
||||
EVTX_DIR=$HASH_DIR/evtx
|
||||
mkdir -p $EVTX_DIR
|
||||
|
||||
# import evtx and write them to import ingest pipeline
|
||||
status "- importing logs to Elasticsearch..."
|
||||
evtx2es "${EVTX}" $HASH
|
||||
if [[ $? -ne 0 ]]; then
|
||||
INVALID_EVTXS_COUNT=$((INVALID_EVTXS_COUNT + 1))
|
||||
status "- WARNING: This evtx file may not have fully imported successfully"
|
||||
else
|
||||
VALID_EVTXS_COUNT=$((VALID_EVTXS_COUNT + 1))
|
||||
fi
|
||||
|
||||
# compare $START to $START_OLDEST
|
||||
START=$(cat /nsm/import/evtx-start_oldest)
|
||||
START_COMPARE=$(date -d $START +%s)
|
||||
START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
|
||||
if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
|
||||
START_OLDEST=$START
|
||||
fi
|
||||
|
||||
# compare $ENDNEXT to $END_NEWEST
|
||||
END=$(cat /nsm/import/evtx-end_newest)
|
||||
ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"`
|
||||
ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
|
||||
END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)
|
||||
if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
|
||||
END_NEWEST=$ENDNEXT
|
||||
fi
|
||||
|
||||
cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
|
||||
chmod 644 "${EVTX_DIR}"/data.evtx
|
||||
|
||||
fi # end of valid evtx
|
||||
|
||||
status
|
||||
|
||||
done # end of for-loop processing evtx files
|
||||
|
||||
# output final messages
|
||||
if [[ $INVALID_EVTXS_COUNT -gt 0 ]]; then
|
||||
status
|
||||
status "Please note! One or more evtx was invalid! You can scroll up to see which ones were invalid."
|
||||
fi
|
||||
|
||||
START_OLDEST_FORMATTED=`date +%Y-%m-%d --date="$START_OLDEST"`
|
||||
START_OLDEST_SLASH=$(echo $START_OLDEST_FORMATTED | sed -e 's/-/%2F/g')
|
||||
END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g')
|
||||
|
||||
if [[ $VALID_EVTXS_COUNT -gt 0 ]] || [[ $SKIPPED_EVTXS_COUNT -gt 0 ]]; then
|
||||
URL="https://{{ URLBASE }}/#/dashboards?q=$HASH_FILTERS%20%7C%20groupby%20-sankey%20event.dataset%20event.category%2a%20%7C%20groupby%20-pie%20event.category%20%7C%20groupby%20-bar%20event.module%20%7C%20groupby%20event.dataset%20%7C%20groupby%20event.module%20%7C%20groupby%20event.category%20%7C%20groupby%20observer.name%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC"
|
||||
|
||||
status "Import complete!"
|
||||
status
|
||||
status "Use the following hyperlink to view the imported data. Triple-click to quickly highlight the entire hyperlink and then copy it into a browser:"
|
||||
status
|
||||
status "$URL"
|
||||
status
|
||||
status "or, manually set the Time Range to be (in UTC):"
|
||||
status
|
||||
status "From: $START_OLDEST_FORMATTED To: $END_NEWEST"
|
||||
status
|
||||
status "Note: It can take 30 seconds or more for events to appear in Security Onion Console."
|
||||
RESULT=0
|
||||
else
|
||||
START_OLDEST=
|
||||
END_NEWEST=
|
||||
URL=
|
||||
RESULT=1
|
||||
fi
|
||||
|
||||
if [[ $json -eq 1 ]]; then
|
||||
jq -n \
|
||||
--arg success_count "$VALID_EVTXS_COUNT" \
|
||||
--arg fail_count "$INVALID_EVTXS_COUNT" \
|
||||
--arg skipped_count "$SKIPPED_EVTXS_COUNT" \
|
||||
--arg begin_date "$START_OLDEST" \
|
||||
--arg end_date "$END_NEWEST" \
|
||||
--arg url "$URL" \
|
||||
--arg hashes "$HASHES" \
|
||||
'''{
|
||||
success_count: $success_count,
|
||||
fail_count: $fail_count,
|
||||
skipped_count: $skipped_count,
|
||||
begin_date: $begin_date,
|
||||
end_date: $end_date,
|
||||
url: $url,
|
||||
hash: ($hashes / " ")
|
||||
}'''
|
||||
fi
|
||||
|
||||
exit $RESULT
|
||||
@@ -15,12 +15,51 @@
|
||||
|
||||
function usage {
|
||||
cat << EOF
|
||||
Usage: $0 <pcap-file-1> [pcap-file-2] [pcap-file-N]
|
||||
Usage: $0 [options] <pcap-file-1> [pcap-file-2] [pcap-file-N]
|
||||
|
||||
Imports one or more PCAP files onto a sensor node. The PCAP traffic will be analyzed and made available for review in the Security Onion toolset.
|
||||
|
||||
Options:
|
||||
--json Outputs summary in JSON format. Implies --quiet.
|
||||
--quiet Silences progress information to stdout.
|
||||
EOF
|
||||
}
|
||||
|
||||
quiet=0
|
||||
json=0
|
||||
INPUT_FILES=
|
||||
while [[ $# -gt 0 ]]; do
|
||||
param=$1
|
||||
shift
|
||||
case "$param" in
|
||||
--json)
|
||||
json=1
|
||||
quiet=1
|
||||
;;
|
||||
--quiet)
|
||||
quiet=1
|
||||
;;
|
||||
-*)
|
||||
echo "Encountered unexpected parameter: $param"
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
if [[ "$INPUT_FILES" != "" ]]; then
|
||||
INPUT_FILES="$INPUT_FILES $param"
|
||||
else
|
||||
INPUT_FILES="$param"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
function status {
|
||||
msg=$1
|
||||
[[ $quiet -eq 1 ]] && return
|
||||
echo "$msg"
|
||||
}
|
||||
|
||||
function pcapinfo() {
|
||||
PCAP=$1
|
||||
ARGS=$2
|
||||
@@ -84,7 +123,7 @@ function zeek() {
|
||||
}
|
||||
|
||||
# if no parameters supplied, display usage
|
||||
if [ $# -eq 0 ]; then
|
||||
if [ "$INPUT_FILES" == "" ]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
@@ -96,31 +135,30 @@ if [ ! -d /opt/so/conf/suricata ]; then
|
||||
fi
|
||||
|
||||
# verify that all parameters are files
|
||||
for i in "$@"; do
|
||||
for i in $INPUT_FILES; do
|
||||
if ! [ -f "$i" ]; then
|
||||
usage
|
||||
echo "\"$i\" is not a valid file!"
|
||||
exit 2
|
||||
fi
|
||||
done
|
||||
|
||||
# track if we have any valid or invalid pcaps
|
||||
INVALID_PCAPS="no"
|
||||
VALID_PCAPS="no"
|
||||
|
||||
# track oldest start and newest end so that we can generate the Kibana search hyperlink at the end
|
||||
START_OLDEST="2050-12-31"
|
||||
END_NEWEST="1971-01-01"
|
||||
|
||||
INVALID_PCAPS_COUNT=0
|
||||
VALID_PCAPS_COUNT=0
|
||||
SKIPPED_PCAPS_COUNT=0
|
||||
|
||||
# paths must be quoted in case they include spaces
|
||||
for PCAP in "$@"; do
|
||||
for PCAP in $INPUT_FILES; do
|
||||
PCAP=$(/usr/bin/realpath "$PCAP")
|
||||
echo "Processing Import: ${PCAP}"
|
||||
echo "- verifying file"
|
||||
status "Processing Import: ${PCAP}"
|
||||
status "- verifying file"
|
||||
if ! pcapinfo "${PCAP}" > /dev/null 2>&1; then
|
||||
# try to fix pcap and then process the fixed pcap directly
|
||||
PCAP_FIXED=`mktemp /tmp/so-import-pcap-XXXXXXXXXX.pcap`
|
||||
echo "- attempting to recover corrupted PCAP file"
|
||||
status "- attempting to recover corrupted PCAP file"
|
||||
pcapfix "${PCAP}" "${PCAP_FIXED}"
|
||||
# Make fixed file world readable since the Suricata docker container will runas a non-root user
|
||||
chmod a+r "${PCAP_FIXED}"
|
||||
@@ -131,33 +169,44 @@ for PCAP in "$@"; do
|
||||
# generate a unique hash to assist with dedupe checks
|
||||
HASH=$(md5sum "${PCAP}" | awk '{ print $1 }')
|
||||
HASH_DIR=/nsm/import/${HASH}
|
||||
echo "- assigning unique identifier to import: $HASH"
|
||||
status "- assigning unique identifier to import: $HASH"
|
||||
|
||||
if [ -d $HASH_DIR ]; then
|
||||
echo "- this PCAP has already been imported; skipping"
|
||||
INVALID_PCAPS="yes"
|
||||
elif pcapinfo "${PCAP}" |egrep -q "Last packet time: 1970-01-01|Last packet time: n/a"; then
|
||||
echo "- this PCAP file is invalid; skipping"
|
||||
INVALID_PCAPS="yes"
|
||||
pcap_data=$(pcapinfo "${PCAP}")
|
||||
if ! echo "$pcap_data" | grep -q "First packet time:" || echo "$pcap_data" |egrep -q "Last packet time: 1970-01-01|Last packet time: n/a"; then
|
||||
status "- this PCAP file is invalid; skipping"
|
||||
INVALID_PCAPS_COUNT=$((INVALID_PCAPS_COUNT + 1))
|
||||
else
|
||||
VALID_PCAPS="yes"
|
||||
if [ -d $HASH_DIR ]; then
|
||||
status "- this PCAP has already been imported; skipping"
|
||||
SKIPPED_PCAPS_COUNT=$((SKIPPED_PCAPS_COUNT + 1))
|
||||
else
|
||||
VALID_PCAPS_COUNT=$((VALID_PCAPS_COUNT + 1))
|
||||
|
||||
PCAP_DIR=$HASH_DIR/pcap
|
||||
mkdir -p $PCAP_DIR
|
||||
PCAP_DIR=$HASH_DIR/pcap
|
||||
mkdir -p $PCAP_DIR
|
||||
|
||||
# generate IDS alerts and write them to standard pipeline
|
||||
echo "- analyzing traffic with Suricata"
|
||||
suricata "${PCAP}" $HASH
|
||||
{% if salt['pillar.get']('global:mdengine') == 'ZEEK' %}
|
||||
# generate Zeek logs and write them to a unique subdirectory in /nsm/import/zeek/
|
||||
# since each run writes to a unique subdirectory, there is no need for a lock file
|
||||
echo "- analyzing traffic with Zeek"
|
||||
zeek "${PCAP}" $HASH
|
||||
{% endif %}
|
||||
# generate IDS alerts and write them to standard pipeline
|
||||
status "- analyzing traffic with Suricata"
|
||||
suricata "${PCAP}" $HASH
|
||||
{% if salt['pillar.get']('global:mdengine') == 'ZEEK' %}
|
||||
# generate Zeek logs and write them to a unique subdirectory in /nsm/import/zeek/
|
||||
# since each run writes to a unique subdirectory, there is no need for a lock file
|
||||
status "- analyzing traffic with Zeek"
|
||||
zeek "${PCAP}" $HASH
|
||||
{% endif %}
|
||||
fi
|
||||
|
||||
if [[ "$HASH_FILTERS" == "" ]]; then
|
||||
HASH_FILTERS="import.id:${HASH}"
|
||||
HASHES="${HASH}"
|
||||
else
|
||||
HASH_FILTERS="$HASH_FILTERS%20OR%20import.id:${HASH}"
|
||||
HASHES="${HASHES} ${HASH}"
|
||||
fi
|
||||
|
||||
START=$(pcapinfo "${PCAP}" -a |grep "First packet time:" | awk '{print $4}')
|
||||
END=$(pcapinfo "${PCAP}" -e |grep "Last packet time:" | awk '{print $4}')
|
||||
echo "- saving PCAP data spanning dates $START through $END"
|
||||
status "- found PCAP data spanning dates $START through $END"
|
||||
|
||||
# compare $START to $START_OLDEST
|
||||
START_COMPARE=$(date -d $START +%s)
|
||||
@@ -179,37 +228,62 @@ for PCAP in "$@"; do
|
||||
|
||||
fi # end of valid pcap
|
||||
|
||||
echo
|
||||
status
|
||||
|
||||
done # end of for-loop processing pcap files
|
||||
|
||||
# remove temp files
|
||||
echo "Cleaning up:"
|
||||
for TEMP_PCAP in ${TEMP_PCAPS[@]}; do
|
||||
echo "- removing temporary pcap $TEMP_PCAP"
|
||||
status "- removing temporary pcap $TEMP_PCAP"
|
||||
rm -f $TEMP_PCAP
|
||||
done
|
||||
|
||||
# output final messages
|
||||
if [ "$INVALID_PCAPS" = "yes" ]; then
|
||||
echo
|
||||
echo "Please note! One or more pcaps was invalid! You can scroll up to see which ones were invalid."
|
||||
if [[ $INVALID_PCAPS_COUNT -gt 0 ]]; then
|
||||
status
|
||||
status "WARNING: One or more pcaps was invalid. Scroll up to see which ones were invalid."
|
||||
fi
|
||||
|
||||
START_OLDEST_SLASH=$(echo $START_OLDEST | sed -e 's/-/%2F/g')
|
||||
END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g')
|
||||
if [[ $VALID_PCAPS_COUNT -gt 0 ]] || [[ $SKIPPED_PCAPS_COUNT -gt 0 ]]; then
|
||||
URL="https://{{ URLBASE }}/#/dashboards?q=$HASH_FILTERS%20%7C%20groupby%20-sankey%20event.dataset%20event.category%2a%20%7C%20groupby%20-pie%20event.category%20%7C%20groupby%20-bar%20event.module%20%7C%20groupby%20event.dataset%20%7C%20groupby%20event.module%20%7C%20groupby%20event.category%20%7C%20groupby%20observer.name%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC"
|
||||
|
||||
if [ "$VALID_PCAPS" = "yes" ]; then
|
||||
cat << EOF
|
||||
|
||||
Import complete!
|
||||
|
||||
You can use the following hyperlink to view data in the time range of your import. You can triple-click to quickly highlight the entire hyperlink and you can then copy it into your browser:
|
||||
https://{{ URLBASE }}/#/dashboards?q=import.id:${HASH}%20%7C%20groupby%20-sankey%20event.dataset%20event.category%2a%20%7C%20groupby%20-pie%20event.category%20%7C%20groupby%20-bar%20event.module%20%7C%20groupby%20event.dataset%20%7C%20groupby%20event.module%20%7C%20groupby%20event.category%20%7C%20groupby%20observer.name%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC
|
||||
|
||||
or you can manually set your Time Range to be (in UTC):
|
||||
From: $START_OLDEST To: $END_NEWEST
|
||||
|
||||
Please note that it may take 30 seconds or more for events to appear in Security Onion Console.
|
||||
EOF
|
||||
status "Import complete!"
|
||||
status
|
||||
status "Use the following hyperlink to view the imported data. Triple-click to quickly highlight the entire hyperlink and then copy it into a browser:"
|
||||
status "$URL"
|
||||
status
|
||||
status "or, manually set the Time Range to be (in UTC):"
|
||||
status "From: $START_OLDEST To: $END_NEWEST"
|
||||
status
|
||||
status "Note: It can take 30 seconds or more for events to appear in Security Onion Console."
|
||||
RESULT=0
|
||||
else
|
||||
START_OLDEST=
|
||||
END_NEWEST=
|
||||
URL=
|
||||
RESULT=1
|
||||
fi
|
||||
|
||||
if [[ $json -eq 1 ]]; then
|
||||
jq -n \
|
||||
--arg success_count "$VALID_PCAPS_COUNT" \
|
||||
--arg fail_count "$INVALID_PCAPS_COUNT" \
|
||||
--arg skipped_count "$SKIPPED_PCAPS_COUNT" \
|
||||
--arg begin_date "$START_OLDEST" \
|
||||
--arg end_date "$END_NEWEST" \
|
||||
--arg url "$URL" \
|
||||
--arg hashes "$HASHES" \
|
||||
'''{
|
||||
success_count: $success_count,
|
||||
fail_count: $fail_count,
|
||||
skipped_count: $skipped_count,
|
||||
begin_date: $begin_date,
|
||||
end_date: $end_date,
|
||||
url: $url,
|
||||
hash: ($hashes / " ")
|
||||
}'''
|
||||
fi
|
||||
|
||||
exit $RESULT
|
||||
81
salt/curator/config.sls
Normal file
@@ -0,0 +1,81 @@
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

# Salt state: provision the curator user/group, log and action directories,
# configuration files, and helper scripts. Only runs when this sls is in the
# allowed_states list for the minion.
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states %}
{% from "curator/map.jinja" import CURATORMERGED %}

# Create the group
curatorgroup:
  group.present:
    - name: curator
    - gid: 934

# Add user
curator:
  user.present:
    - uid: 934
    - gid: 934
    - home: /opt/so/conf/curator
    - createhome: False

# Create the log directory
# NOTE(review): files/dirs below are owned by group 939 while the curator
# group created above has gid 934 — confirm 939 is the intended group.
curlogdir:
  file.directory:
    - name: /opt/so/log/curator
    - user: 934
    - group: 939

# Directory holding curator action definitions
curactiondir:
  file.directory:
    - name: /opt/so/conf/curator/action
    - user: 934
    - group: 939
    - makedirs: True

# Render the action configs from templates, passing the merged
# per-index close/delete settings as template defaults
actionconfs:
  file.recurse:
    - name: /opt/so/conf/curator/action
    - source: salt://curator/files/action
    - user: 934
    - group: 939
    - template: jinja
    - defaults:
        CURATORMERGED: {{ CURATORMERGED.elasticsearch.index_settings }}

# Main curator configuration (contains credentials; changes are not shown)
curconf:
  file.managed:
    - name: /opt/so/conf/curator/curator.yml
    - source: salt://curator/files/curator.yml
    - user: 934
    - group: 939
    - mode: 660
    - template: jinja
    - show_changes: False

# Install static helper scripts into /usr/sbin
curator_sbin:
  file.recurse:
    - name: /usr/sbin
    - source: salt://curator/tools/sbin
    - user: 934
    - group: 939
    - file_mode: 755

# Install jinja-templated helper scripts into /usr/sbin
curator_sbin_jinja:
  file.recurse:
    - name: /usr/sbin
    - source: salt://curator/tools/sbin_jinja
    - user: 934
    - group: 939
    - file_mode: 755
    - template: jinja

{% else %}

# Fail visibly when this state is applied to a minion where it is not allowed.
{{sls}}_state_not_allowed:
  test.fail_without_changes:
    - name: {{sls}}_state_not_allowed

{% endif %}
|
||||
@@ -1,98 +1,100 @@
|
||||
elasticsearch:
|
||||
index_settings:
|
||||
logs-import-so:
|
||||
close: 73000
|
||||
delete: 73001
|
||||
logs-strelka-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-suricata-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-syslog-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-zeek-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-metricbeat-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-osquerybeat-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-fleet_server-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-filebeat-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-auth-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-application-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-security-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-system-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-syslog-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-windows-powershell-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-windows-sysmon_operational-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-beats:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-elasticsearch:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-firewall:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-ids:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-import:
|
||||
close: 73000
|
||||
delete: 73001
|
||||
so-kratos:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-kibana:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-logstash:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-netflow:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-osquery:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-ossec:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-redis:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-strelka:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-syslog:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-zeek:
|
||||
close: 30
|
||||
delete: 365
|
||||
curator:
|
||||
enabled: False
|
||||
elasticsearch:
|
||||
index_settings:
|
||||
logs-import-so:
|
||||
close: 73000
|
||||
delete: 73001
|
||||
logs-strelka-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-suricata-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-syslog-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-zeek-so:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-metricbeat-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-osquerybeat-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-fleet_server-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-filebeat-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-elastic_agent-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-auth-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-application-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-security-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-system-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-system-syslog-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-windows-powershell-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
logs-windows-sysmon_operational-default:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-beats:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-elasticsearch:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-firewall:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-ids:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-import:
|
||||
close: 73000
|
||||
delete: 73001
|
||||
so-kratos:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-kibana:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-logstash:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-netflow:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-osquery:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-ossec:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-redis:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-strelka:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-syslog:
|
||||
close: 30
|
||||
delete: 365
|
||||
so-zeek:
|
||||
close: 30
|
||||
delete: 365
|
||||
|
||||
35
salt/curator/disabled.sls
Normal file
@@ -0,0 +1,35 @@
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||
{% if sls.split('.')[0] in allowed_states %}
|
||||
|
||||
include:
|
||||
- curator.sostatus
|
||||
|
||||
so-curator:
|
||||
docker_container.absent:
|
||||
- force: True
|
||||
|
||||
so-curator_so-status.disabled:
|
||||
file.comment:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- regex: ^so-curator$
|
||||
|
||||
so-curator-cluster-close:
|
||||
cron.absent:
|
||||
- identifier: so-curator-cluster-close
|
||||
|
||||
so-curator-cluster-delete:
|
||||
cron.absent:
|
||||
- identifier: so-curator-cluster-delete
|
||||
|
||||
{% else %}
|
||||
|
||||
{{sls}}_state_not_allowed:
|
||||
test.fail_without_changes:
|
||||
- name: {{sls}}_state_not_allowed
|
||||
|
||||
{% endif %}
|
||||
88
salt/curator/enabled.sls
Normal file
@@ -0,0 +1,88 @@
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||
{% if sls.split('.')[0] in allowed_states %}
|
||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
||||
{% from 'docker/docker.map.jinja' import DOCKER %}
|
||||
|
||||
include:
|
||||
- curator.config
|
||||
- curator.sostatus
|
||||
|
||||
so-curator:
|
||||
docker_container.running:
|
||||
- image: {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-curator:{{ GLOBALS.so_version }}
|
||||
- start: True
|
||||
- hostname: curator
|
||||
- name: so-curator
|
||||
- user: curator
|
||||
- networks:
|
||||
- sobridge:
|
||||
- ipv4_address: {{ DOCKER.containers['so-curator'].ip }}
|
||||
- interactive: True
|
||||
- tty: True
|
||||
- binds:
|
||||
- /opt/so/conf/curator/curator.yml:/etc/curator/config/curator.yml:ro
|
||||
- /opt/so/conf/curator/action/:/etc/curator/action:ro
|
||||
- /opt/so/log/curator:/var/log/curator:rw
|
||||
{% if DOCKER.containers['so-curator'].custom_bind_mounts %}
|
||||
{% for BIND in DOCKER.containers['so-curator'].custom_bind_mounts %}
|
||||
- {{ BIND }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if DOCKER.containers['so-curator'].extra_hosts %}
|
||||
- extra_hosts:
|
||||
{% for XTRAHOST in DOCKER.containers['so-curator'].extra_hosts %}
|
||||
- {{ XTRAHOST }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if DOCKER.containers['so-curator'].extra_env %}
|
||||
- environment:
|
||||
{% for XTRAENV in DOCKER.containers['so-curator'].extra_env %}
|
||||
- {{ XTRAENV }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
- require:
|
||||
- file: actionconfs
|
||||
- file: curconf
|
||||
- file: curlogdir
|
||||
- watch:
|
||||
- file: curconf
|
||||
|
||||
delete_so-curator_so-status.disabled:
|
||||
file.uncomment:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- regex: ^so-curator$
|
||||
|
||||
so-curator-cluster-close:
|
||||
cron.present:
|
||||
- name: /usr/sbin/so-curator-cluster-close > /opt/so/log/curator/cron-close.log 2>&1
|
||||
- identifier: so-curator-cluster-close
|
||||
- user: root
|
||||
- minute: '2'
|
||||
- hour: '*/1'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
so-curator-cluster-delete:
|
||||
cron.present:
|
||||
- name: /usr/sbin/so-curator-cluster-delete > /opt/so/log/curator/cron-cluster-delete.log 2>&1
|
||||
- identifier: so-curator-cluster-delete
|
||||
- user: root
|
||||
- minute: '*/5'
|
||||
- hour: '*'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
{% else %}
|
||||
|
||||
{{sls}}_state_not_allowed:
|
||||
test.fail_without_changes:
|
||||
- name: {{sls}}_state_not_allowed
|
||||
|
||||
{% endif %}
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -13,7 +13,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -13,7 +13,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -12,7 +12,7 @@ actions:
|
||||
options:
|
||||
delete_aliases: False
|
||||
timeout_override:
|
||||
continue_if_exception: False
|
||||
ignore_empty_list: True
|
||||
disable_action: False
|
||||
filters:
|
||||
- filtertype: pattern
|
||||
|
||||
@@ -4,9 +4,9 @@
|
||||
# Elastic License 2.0.
|
||||
|
||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
||||
{% if grains['role'] in ['so-searchnode', 'so-heavynode'] %}
|
||||
{% if GLOBALS.role in ['so-searchnode', 'so-heavynode'] %}
|
||||
{%- set elasticsearch = GLOBALS.node_ip -%}
|
||||
{% elif grains['role'] in ['so-eval', 'so-managersearch', 'so-standalone', 'so-manager'] %}
|
||||
{% elif GLOBALS.role in ['so-eval', 'so-managersearch', 'so-standalone', 'so-manager'] %}
|
||||
{%- set elasticsearch = GLOBALS.manager_ip -%}
|
||||
{%- endif %}
|
||||
{%- set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
|
||||
@@ -30,10 +30,8 @@ elasticsearch:
|
||||
id:
|
||||
api_key:
|
||||
master_only: False
|
||||
{%- if salt['pillar.get']('elasticsearch:auth:enabled') is sameas true %}
|
||||
username: "{{ ES_USER }}"
|
||||
password: "{{ ES_PASS }}"
|
||||
{%- endif %}
|
||||
|
||||
logging:
|
||||
loglevel: INFO
|
||||
|
||||
@@ -3,155 +3,11 @@
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||
{% if sls in allowed_states %}
|
||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
||||
{% from 'docker/docker.map.jinja' import DOCKER %}
|
||||
{% from "curator/map.jinja" import CURATOROPTIONS %}
|
||||
{% from "curator/map.jinja" import CURATORMERGED %}
|
||||
{% set REMOVECURATORCRON = False %}
|
||||
|
||||
# Curator
|
||||
# Create the group
|
||||
curatorgroup:
|
||||
group.present:
|
||||
- name: curator
|
||||
- gid: 934
|
||||
|
||||
# Add user
|
||||
curator:
|
||||
user.present:
|
||||
- uid: 934
|
||||
- gid: 934
|
||||
- home: /opt/so/conf/curator
|
||||
- createhome: False
|
||||
|
||||
# Create the log directory
|
||||
curlogdir:
|
||||
file.directory:
|
||||
- name: /opt/so/log/curator
|
||||
- user: 934
|
||||
- group: 939
|
||||
|
||||
curactiondir:
|
||||
file.directory:
|
||||
- name: /opt/so/conf/curator/action
|
||||
- user: 934
|
||||
- group: 939
|
||||
- makedirs: True
|
||||
|
||||
actionconfs:
|
||||
file.recurse:
|
||||
- name: /opt/so/conf/curator/action
|
||||
- source: salt://curator/files/action
|
||||
- user: 934
|
||||
- group: 939
|
||||
- template: jinja
|
||||
- defaults:
|
||||
CURATORMERGED: {{ CURATORMERGED }}
|
||||
|
||||
curconf:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/curator/curator.yml
|
||||
- source: salt://curator/files/curator.yml
|
||||
- user: 934
|
||||
- group: 939
|
||||
- mode: 660
|
||||
- template: jinja
|
||||
- show_changes: False
|
||||
|
||||
curclusterclose:
|
||||
file.managed:
|
||||
- name: /usr/sbin/so-curator-cluster-close
|
||||
- source: salt://curator/files/bin/so-curator-cluster-close
|
||||
- user: 934
|
||||
- group: 939
|
||||
- mode: 755
|
||||
- template: jinja
|
||||
|
||||
curclusterdelete:
|
||||
file.managed:
|
||||
- name: /usr/sbin/so-curator-cluster-delete
|
||||
- source: salt://curator/files/bin/so-curator-cluster-delete
|
||||
- user: 934
|
||||
- group: 939
|
||||
- mode: 755
|
||||
|
||||
curclusterdeletedelete:
|
||||
file.managed:
|
||||
- name: /usr/sbin/so-curator-cluster-delete-delete
|
||||
- source: salt://curator/files/bin/so-curator-cluster-delete-delete
|
||||
- user: 934
|
||||
- group: 939
|
||||
- mode: 755
|
||||
- template: jinja
|
||||
|
||||
so-curator:
|
||||
docker_container.{{ CURATOROPTIONS.status }}:
|
||||
- image: {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-curator:{{ GLOBALS.so_version }}
|
||||
- start: {{ CURATOROPTIONS.start }}
|
||||
- hostname: curator
|
||||
- name: so-curator
|
||||
- user: curator
|
||||
- networks:
|
||||
- sobridge:
|
||||
- ipv4_address: {{ DOCKER.containers['so-curator'].ip }}
|
||||
- interactive: True
|
||||
- tty: True
|
||||
- binds:
|
||||
- /opt/so/conf/curator/curator.yml:/etc/curator/config/curator.yml:ro
|
||||
- /opt/so/conf/curator/action/:/etc/curator/action:ro
|
||||
- /opt/so/log/curator:/var/log/curator:rw
|
||||
- require:
|
||||
- file: actionconfs
|
||||
- file: curconf
|
||||
- file: curlogdir
|
||||
- watch:
|
||||
- file: curconf
|
||||
|
||||
append_so-curator_so-status.conf:
|
||||
file.append:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- text: so-curator
|
||||
- unless: grep -q so-curator /opt/so/conf/so-status/so-status.conf
|
||||
{% if not CURATOROPTIONS.start %}
|
||||
so-curator_so-status.disabled:
|
||||
file.comment:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- regex: ^so-curator$
|
||||
{% else %}
|
||||
delete_so-curator_so-status.disabled:
|
||||
file.uncomment:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- regex: ^so-curator$
|
||||
{% endif %}
|
||||
|
||||
so-curator-cluster-close:
|
||||
cron.present:
|
||||
- name: /usr/sbin/so-curator-cluster-close > /opt/so/log/curator/cron-close.log 2>&1
|
||||
- identifier: so-curator-cluster-close
|
||||
- user: root
|
||||
- minute: '2'
|
||||
- hour: '*/1'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
so-curator-cluster-delete:
|
||||
cron.present:
|
||||
- name: /usr/sbin/so-curator-cluster-delete > /opt/so/log/curator/cron-cluster-delete.log 2>&1
|
||||
- identifier: so-curator-cluster-delete
|
||||
- user: root
|
||||
- minute: '*/5'
|
||||
- hour: '*'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
{% from 'curator/map.jinja' import CURATORMERGED %}
|
||||
|
||||
include:
|
||||
{% if CURATORMERGED.enabled %}
|
||||
- curator.enabled
|
||||
{% else %}
|
||||
|
||||
{{sls}}_state_not_allowed:
|
||||
test.fail_without_changes:
|
||||
- name: {{sls}}_state_not_allowed
|
||||
|
||||
- curator.disabled
|
||||
{% endif %}
|
||||
|
||||
@@ -1,18 +1,7 @@
|
||||
{% set CURATOROPTIONS = {} %}
|
||||
{% set ENABLED = salt['pillar.get']('curator:enabled', True) %}
|
||||
{% do CURATOROPTIONS.update({'manage_sostatus': True}) %}
|
||||
|
||||
# don't start the docker container if curator is disabled via pillar
|
||||
{% if not ENABLED %}
|
||||
{% do CURATOROPTIONS.update({'start': False}) %}
|
||||
{% do CURATOROPTIONS.update({'status': 'absent'}) %}
|
||||
{% if (TRUECLUSTER and grains.id.split('_')|last == 'searchnode') or (not TRUECLUSTER and grains.id.split('_')|last == 'manager') %}
|
||||
{% do CURATOROPTIONS.update({'manage_sostatus': False}) %}
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% do CURATOROPTIONS.update({'start': True}) %}
|
||||
{% do CURATOROPTIONS.update({'status': 'running'}) %}
|
||||
{% endif %}
|
||||
{# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
Elastic License 2.0. #}
|
||||
|
||||
{% import_yaml 'curator/defaults.yaml' as CURATORDEFAULTS %}
|
||||
{% set CURATORMERGED = salt['pillar.get']('elasticsearch:index_settings', CURATORDEFAULTS.elasticsearch.index_settings, merge=true) %}
|
||||
{% set CURATORMERGED = salt['pillar.get']('curator', CURATORDEFAULTS.curator, merge=true) %}
|
||||
|
||||
108
salt/curator/soc_curator.yaml
Normal file
@@ -0,0 +1,108 @@
|
||||
curator:
|
||||
enabled:
|
||||
description: You can enable or disable Curator.
|
||||
helpLink: curator.html
|
||||
elasticsearch:
|
||||
index_settings:
|
||||
logs-import-so:
|
||||
close: &close
|
||||
description: Age, in days, when Curator closes the index.
|
||||
helpLink: curator.html
|
||||
forcedType: int
|
||||
delete: &delete
|
||||
description: Age, in days, when Curator deletes the index.
|
||||
helpLink: curator.html
|
||||
forcedType: int
|
||||
logs-strelka-so:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-suricata-so:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-syslog-so:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-zeek-so:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-elastic_agent-metricbeat-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-elastic_agent-osquerybeat-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-elastic_agent-fleet_server-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-elastic_agent-filebeat-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-elastic_agent-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-system-auth-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-system-application-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-system-security-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-system-system-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-system-syslog-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-windows-powershell-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
logs-windows-sysmon_operational-default:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-beats:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-elasticsearch:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-firewall:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-ids:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-import:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-kratos:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-kibana:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-logstash:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-netflow:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-osquery:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-ossec:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-redis:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-strelka:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-syslog:
|
||||
close: *close
|
||||
delete: *delete
|
||||
so-zeek:
|
||||
close: *close
|
||||
delete: *delete
|
||||
21
salt/curator/sostatus.sls
Normal file
@@ -0,0 +1,21 @@
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
||||
# Elastic License 2.0.
|
||||
|
||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||
{% if sls.split('.')[0] in allowed_states %}
|
||||
|
||||
append_so-curator_so-status.conf:
|
||||
file.append:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- text: so-curator
|
||||
- unless: grep -q so-curator /opt/so/conf/so-status/so-status.conf
|
||||
|
||||
{% else %}
|
||||
|
||||
{{sls}}_state_not_allowed:
|
||||
test.fail_without_changes:
|
||||
- name: {{sls}}_state_not_allowed
|
||||
|
||||
{% endif %}
|
||||
0
salt/common/tools/sbin/so-curator-restart → salt/curator/tools/sbin/so-curator-restart
Executable file → Normal file
0
salt/common/tools/sbin/so-curator-start → salt/curator/tools/sbin/so-curator-start
Executable file → Normal file
0
salt/common/tools/sbin/so-curator-stop → salt/curator/tools/sbin/so-curator-stop
Executable file → Normal file
@@ -10,44 +10,58 @@
|
||||
{%- set RETENTION = salt['pillar.get']('elasticsearch:retention', ELASTICDEFAULTS.elasticsearch.retention, merge=true) -%}
|
||||
|
||||
LOG="/opt/so/log/curator/so-curator-cluster-delete.log"
|
||||
LOG_SIZE_LIMIT=$(/usr/sbin/so-elasticsearch-cluster-space-total {{ RETENTION.retention_pct}})
|
||||
ALERT_LOG="/opt/so/log/curator/alert.log"
|
||||
LOG_SIZE_LIMIT_GB=$(/usr/sbin/so-elasticsearch-cluster-space-total {{ RETENTION.retention_pct}})
|
||||
LOG_SIZE_LIMIT=$(( "$LOG_SIZE_LIMIT_GB" * 1000 * 1000 * 1000 ))
|
||||
ITERATION=0
|
||||
MAX_ITERATIONS=10
|
||||
|
||||
overlimit() {
|
||||
[[ $(/usr/sbin/so-elasticsearch-cluster-space-used) -gt "${LOG_SIZE_LIMIT}" ]]
|
||||
[[ $(/usr/sbin/so-elasticsearch-cluster-space-used) -gt ${LOG_SIZE_LIMIT} ]]
|
||||
}
|
||||
|
||||
# Check to see if Elasticsearch indices using more disk space than LOG_SIZE_LIMIT
|
||||
# Closed indices will be deleted first. If we are able to bring disk space under LOG_SIZE_LIMIT, we will break out of the loop.
|
||||
while overlimit; do
|
||||
###########################
|
||||
# Check for 2 conditions: #
|
||||
###########################
|
||||
# 1. Check if Elasticsearch indices are using more disk space than LOG_SIZE_LIMIT
|
||||
# 2. Check if the maximum number of iterations - MAX_ITERATIONS - has been exceeded. If so, exit.
|
||||
# Closed indices will be deleted first. If we are able to bring disk space under LOG_SIZE_LIMIT, or the number of iterations has exceeded the maximum allowed number of iterations, we will break out of the loop.
|
||||
|
||||
while overlimit && [[ $ITERATION -lt $MAX_ITERATIONS ]]; do
|
||||
|
||||
# If we can't query Elasticsearch, then immediately return false.
|
||||
/usr/sbin/so-elasticsearch-query _cat/indices?h=index,status > /dev/null 2>&1
|
||||
[ $? -eq 1 ] && echo "$(date) - Could not query Elasticsearch." >> ${LOG} && exit
|
||||
|
||||
# We iterate through the closed and open indices
|
||||
CLOSED_INDICES=$(/usr/sbin/so-elasticsearch-query _cat/indices?h=index,status | grep 'close$' | awk '{print $1}' | grep -v "so-case" | grep -E "(logstash-|so-|.ds-logs-)" | sort -t- -k3)
|
||||
OPEN_INDICES=$(/usr/sbin/so-elasticsearch-query _cat/indices?h=index,status | grep 'open$' | awk '{print $1}' | grep -v "so-case" | grep -E "(logstash-|so-|.ds-logs-)" | sort -t- -k3)
|
||||
for INDEX in ${CLOSED_INDICES} ${OPEN_INDICES}; do
|
||||
# Now that we've sorted the indices from oldest to newest, we need to check each index to see if it is assigned as the current write index for a data stream
|
||||
CLOSED_INDICES=$(/usr/sbin/so-elasticsearch-query _cat/indices?h=index,status | grep 'close$' | awk '{print $1}' | grep -vE "playbook|so-case" | grep -E "(logstash-|so-|.ds-logs-)" | sort -t- -k3)
|
||||
OPEN_INDICES=$(/usr/sbin/so-elasticsearch-query _cat/indices?h=index,status | grep 'open$' | awk '{print $1}' | grep -vE "playbook|so-case" | grep -E "(logstash-|so-|.ds-logs-)" | sort -t- -k3)
|
||||
|
||||
for INDEX in ${CLOSED_INDICES} ${OPEN_INDICES}; do
|
||||
# Now that we've sorted the indices from oldest to newest, we need to check each index to see if it is assigned as the current write index for a data stream
|
||||
# To do so, we need to identify to which data stream this index is associated
|
||||
# We extract the data stream name using the pattern below
|
||||
DATASTREAM_PATTERN="logs-[a-zA-Z_.]+-[a-zA-Z_.]+"
|
||||
DATASTREAM=$(echo "${INDEX}" | grep -oE "$DATASTREAM_PATTERN")
|
||||
# We look up the data stream, and determine the write index. If there is only one backing index, we delete the entire data stream
|
||||
BACKING_INDICES=$(/usr/sbin/so-elasticsearch-query _data_stream/${DATASTREAM} | jq -r '.data_streams[0].indices | length')
|
||||
if [ "$BACKING_INDICES" -gt 1 ]; then
|
||||
BACKING_INDICES=$(/usr/sbin/so-elasticsearch-query _data_stream/${DATASTREAM} | jq -r '.data_streams[0].indices | length')
|
||||
if [ "$BACKING_INDICES" -gt 1 ]; then
|
||||
CURRENT_WRITE_INDEX=$(/usr/sbin/so-elasticsearch-query _data_stream/$DATASTREAM | jq -r .data_streams[0].indices[-1].index_name)
|
||||
# We make sure we are not trying to delete a write index
|
||||
# We make sure we are not trying to delete a write index
|
||||
if [ "${INDEX}" != "${CURRENT_WRITE_INDEX}" ]; then
|
||||
# This should not be a write index, so we should be allowed to delete it
|
||||
printf "\n$(date) - Used disk space exceeds LOG_SIZE_LIMIT (${LOG_SIZE_LIMIT} GB) - Deleting ${INDEX} index...\n" >> ${LOG}
|
||||
printf "\n$(date) - Used disk space exceeds LOG_SIZE_LIMIT (${LOG_SIZE_LIMIT_GB} GB) - Deleting ${INDEX} index...\n" >> ${LOG}
|
||||
/usr/sbin/so-elasticsearch-query ${INDEX} -XDELETE >> ${LOG} 2>&1
|
||||
fi
|
||||
else
|
||||
# We delete the entire data stream, since there is only one backing index
|
||||
printf "\n$(date) - Used disk space exceeds LOG_SIZE_LIMIT (${LOG_SIZE_LIMIT} GB) - Deleting ${DATASTREAM} data stream...\n" >> ${LOG}
|
||||
/usr/sbin/so-elasticsearch-query _data_stream/${DATASTREAM} -XDELETE >> ${LOG} 2>&1
|
||||
fi
|
||||
if ! overlimit; then
|
||||
fi
|
||||
if ! overlimit ; then
|
||||
exit
|
||||
fi
|
||||
((ITERATION++))
|
||||
done
|
||||
if [[ $ITERATION -ge $MAX_ITERATIONS ]]; then
|
||||
alert_id=$(uuidgen)
|
||||
printf "\n$(date) -> Maximum iteration limit reached ($MAX_ITERATIONS). Unable to bring disk below threshold. Writing alert ($alert_id) to ${ALERT_LOG}\n" >> ${LOG}
|
||||
printf "\n$(date),$alert_id,Maximum iteration limit reached ($MAX_ITERATIONS). Unable to bring disk below threshold.\n" >> ${ALERT_LOG}
|
||||
fi
|
||||
done
|
||||
|
Before Width: | Height: | Size: 269 KiB After Width: | Height: | Size: 269 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 319 KiB After Width: | Height: | Size: 319 KiB |
@@ -1,7 +1,7 @@
|
||||
include:
|
||||
- workstation.xwindows
|
||||
- desktop.xwindows
|
||||
{# If the master is 'salt' then the minion hasn't been configured and isn't connected to the grid. #}
|
||||
{# We need this since the trusted-ca state uses mine data. #}
|
||||
{% if grains.master != 'salt' %}
|
||||
- workstation.trusted-ca
|
||||
- desktop.trusted-ca
|
||||
{% endif %}
|
||||
310
salt/desktop/packages.sls
Normal file
@@ -0,0 +1,310 @@
|
||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
||||
|
||||
{# we only want this state to run it is CentOS #}
|
||||
{% if GLOBALS.os == 'Rocky' %}
|
||||
|
||||
|
||||
desktop_packages:
|
||||
pkg.installed:
|
||||
- pkgs:
|
||||
- NetworkManager
|
||||
- NetworkManager-adsl
|
||||
- NetworkManager-bluetooth
|
||||
- NetworkManager-l2tp-gnome
|
||||
- NetworkManager-libreswan-gnome
|
||||
- NetworkManager-openconnect-gnome
|
||||
- NetworkManager-openvpn-gnome
|
||||
- NetworkManager-ppp
|
||||
- NetworkManager-pptp-gnome
|
||||
- NetworkManager-team
|
||||
- NetworkManager-tui
|
||||
- NetworkManager-wifi
|
||||
- NetworkManager-wwan
|
||||
- PackageKit-gstreamer-plugin
|
||||
- aajohan-comfortaa-fonts
|
||||
- abattis-cantarell-fonts
|
||||
- acl
|
||||
- alsa-ucm
|
||||
- alsa-utils
|
||||
- anaconda
|
||||
- anaconda-install-env-deps
|
||||
- anaconda-live
|
||||
- at
|
||||
- attr
|
||||
- audit
|
||||
- authselect
|
||||
- basesystem
|
||||
- bash
|
||||
- bash-completion
|
||||
- bc
|
||||
- blktrace
|
||||
- bluez
|
||||
- bolt
|
||||
- bpftool
|
||||
- bzip2
|
||||
- chkconfig
|
||||
- chromium
|
||||
- chrony
|
||||
- cinnamon
|
||||
- cinnamon-control-center
|
||||
- cinnamon-screensaver
|
||||
- cockpit
|
||||
- coreutils
|
||||
- cpio
|
||||
- cronie
|
||||
- crontabs
|
||||
- crypto-policies
|
||||
- crypto-policies-scripts
|
||||
- cryptsetup
|
||||
- curl
|
||||
- cyrus-sasl-plain
|
||||
- dbus
|
||||
- dejavu-sans-fonts
|
||||
- dejavu-sans-mono-fonts
|
||||
- dejavu-serif-fonts
|
||||
- dnf
|
||||
- dnf-plugins-core
|
||||
- dos2unix
|
||||
- dosfstools
|
||||
- dracut-config-rescue
|
||||
- dracut-live
|
||||
- dsniff
|
||||
- e2fsprogs
|
||||
- ed
|
||||
- efi-filesystem
|
||||
- efibootmgr
|
||||
- efivar-libs
|
||||
- eom
|
||||
- ethtool
|
||||
- f36-backgrounds-extras-gnome
|
||||
- f36-backgrounds-gnome
|
||||
- f37-backgrounds-extras-gnome
|
||||
- f37-backgrounds-gnome
|
||||
- file
|
||||
- filesystem
|
||||
- firewall-config
|
||||
- firewalld
|
||||
- fprintd-pam
|
||||
- git
|
||||
- glibc
|
||||
- glibc-all-langpacks
|
||||
- gnome-calculator
|
||||
- gnome-disk-utility
|
||||
- gnome-screenshot
|
||||
- gnome-system-monitor
|
||||
- gnome-terminal
|
||||
- gnupg2
|
||||
- google-noto-emoji-color-fonts
|
||||
- google-noto-sans-cjk-ttc-fonts
|
||||
- google-noto-sans-gurmukhi-fonts
|
||||
- google-noto-sans-sinhala-vf-fonts
|
||||
- google-noto-serif-cjk-ttc-fonts
|
||||
- grub2-common
|
||||
- grub2-pc-modules
|
||||
- grub2-tools
|
||||
- grub2-tools-efi
|
||||
- grub2-tools-extra
|
||||
- grub2-tools-minimal
|
||||
- grubby
|
||||
- gstreamer1-plugins-bad-free
|
||||
- gstreamer1-plugins-good
|
||||
- gstreamer1-plugins-ugly-free
|
||||
- gvfs-gphoto2
|
||||
- gvfs-mtp
|
||||
- gvfs-smb
|
||||
- hostname
|
||||
- hyperv-daemons
|
||||
- ibus-anthy
|
||||
- ibus-hangul
|
||||
- ibus-libpinyin
|
||||
- ibus-libzhuyin
|
||||
- ibus-m17n
|
||||
- ibus-typing-booster
|
||||
- imsettings-systemd
|
||||
- initial-setup-gui
|
||||
- initscripts
|
||||
- initscripts-rename-device
|
||||
- iproute
|
||||
- iproute-tc
|
||||
- iprutils
|
||||
- iputils
|
||||
- irqbalance
|
||||
- iwl100-firmware
|
||||
- iwl1000-firmware
|
||||
- iwl105-firmware
|
||||
- iwl135-firmware
|
||||
- iwl2000-firmware
|
||||
- iwl2030-firmware
|
||||
- iwl3160-firmware
|
||||
- iwl5000-firmware
|
||||
- iwl5150-firmware
|
||||
- iwl6000g2a-firmware
|
||||
- iwl6000g2b-firmware
|
||||
- iwl6050-firmware
|
||||
- iwl7260-firmware
|
||||
- jomolhari-fonts
|
||||
- julietaula-montserrat-fonts
|
||||
- kbd
|
||||
- kernel
|
||||
- kernel-modules
|
||||
- kernel-modules-extra
|
||||
- kernel-tools
|
||||
- kexec-tools
|
||||
- khmer-os-system-fonts
|
||||
- kmod-kvdo
|
||||
- kpatch
|
||||
- kpatch-dnf
|
||||
- ledmon
|
||||
- less
|
||||
- liberation-mono-fonts
|
||||
- liberation-sans-fonts
|
||||
- liberation-serif-fonts
|
||||
- libertas-sd8787-firmware
|
||||
- libstoragemgmt
|
||||
- libsysfs
|
||||
- lightdm
|
||||
- linux-firmware
|
||||
- logrotate
|
||||
- lohit-assamese-fonts
|
||||
- lohit-bengali-fonts
|
||||
- lohit-devanagari-fonts
|
||||
- lohit-gujarati-fonts
|
||||
- lohit-kannada-fonts
|
||||
- lohit-odia-fonts
|
||||
- lohit-tamil-fonts
|
||||
- lohit-telugu-fonts
|
||||
- lshw
|
||||
- lsof
|
||||
- lsscsi
|
||||
- lvm2
|
||||
- mailcap
|
||||
- man-db
|
||||
- man-pages
|
||||
- mcelog
|
||||
- mdadm
|
||||
- memtest86+
|
||||
- metacity
|
||||
- microcode_ctl
|
||||
- mlocate
|
||||
- mtr
|
||||
- nano
|
||||
- ncurses
|
||||
- nemo-fileroller
|
||||
- nemo-image-converter
|
||||
- nemo-preview
|
||||
- net-tools
|
||||
- netronome-firmware
|
||||
- ngrep
|
||||
- nm-connection-editor
|
||||
- nmap-ncat
|
||||
- nvme-cli
|
||||
- open-vm-tools-desktop
|
||||
- openssh-clients
|
||||
- openssh-server
|
||||
- p11-kit
|
||||
- paktype-naskh-basic-fonts
|
||||
- parole
|
||||
- parted
|
||||
- passwd
|
||||
- pciutils
|
||||
- pinfo
|
||||
- pipewire
|
||||
- pipewire-alsa
|
||||
- pipewire-gstreamer
|
||||
- pipewire-jack-audio-connection-kit
|
||||
- pipewire-pulseaudio
|
||||
- pipewire-utils
|
||||
- plymouth
|
||||
- policycoreutils
|
||||
- powerline
|
||||
- ppp
|
||||
- prefixdevname
|
||||
- procps-ng
|
||||
- psacct
|
||||
- pt-sans-fonts
|
||||
- python3-libselinux
|
||||
- python3-scapy
|
||||
- qemu-guest-agent
|
||||
- quota
|
||||
- realmd
|
||||
- redshift-gtk
|
||||
- rocky-backgrounds
|
||||
- rocky-release
|
||||
- rootfiles
|
||||
- rpm
|
||||
- rpm-plugin-audit
|
||||
- rsync
|
||||
- rsyslog
|
||||
- rsyslog-gnutls
|
||||
- rsyslog-gssapi
|
||||
- rsyslog-relp
|
||||
- salt-minion
|
||||
- sane-backends-drivers-scanners
|
||||
- selinux-policy-targeted
|
||||
- setroubleshoot
|
||||
- setup
|
||||
- sg3_utils
|
||||
- sg3_utils-libs
|
||||
- shadow-utils
|
||||
- sil-abyssinica-fonts
|
||||
- sil-nuosu-fonts
|
||||
- sil-padauk-fonts
|
||||
- slick-greeter
|
||||
- slick-greeter-cinnamon
|
||||
- smartmontools
|
||||
- smc-meera-fonts
|
||||
- sos
|
||||
- spice-vdagent
|
||||
- ssldump
|
||||
- sssd
|
||||
- sssd-common
|
||||
- sssd-kcm
|
||||
- stix-fonts
|
||||
- strace
|
||||
- sudo
|
||||
- symlinks
|
||||
- syslinux
|
||||
- systemd
|
||||
- systemd-udev
|
||||
- tar
|
||||
- tcpdump
|
||||
- tcpflow
|
||||
- teamd
|
||||
- thai-scalable-waree-fonts
|
||||
- time
|
||||
- tmux
|
||||
- tmux-powerline
|
||||
- transmission
|
||||
- tree
|
||||
- tuned
|
||||
- unzip
|
||||
- usb_modeswitch
|
||||
- usbutils
|
||||
- util-linux
|
||||
- util-linux-user
|
||||
- vdo
|
||||
- vim-enhanced
|
||||
- vim-minimal
|
||||
- vim-powerline
|
||||
- virt-what
|
||||
- wget
|
||||
- whois
|
||||
- which
|
||||
- wireplumber
|
||||
- wireshark
|
||||
- words
|
||||
- xdg-user-dirs-gtk
|
||||
- xed
|
||||
- xfsdump
|
||||
- xfsprogs
|
||||
- xreader
|
||||
- yum
|
||||
- zip
|
||||
|
||||
{% else %}
|
||||
|
||||
desktop_packages_os_fail:
|
||||
test.fail_without_changes:
|
||||
- comment: 'SO desktop can only be installed on Rocky'
|
||||
|
||||
{% endif %}
|
||||
@@ -10,8 +10,8 @@ remove_graphical_target:
|
||||
- force: True
|
||||
|
||||
{% else %}
|
||||
workstation_trusted-ca_os_fail:
|
||||
desktop_trusted-ca_os_fail:
|
||||
test.fail_without_changes:
|
||||
- comment: 'SO Analyst Workstation can only be installed on CentOS'
|
||||
- comment: 'SO Desktop can only be installed on Rocky'
|
||||
|
||||
{% endif %}
|
||||
@@ -29,8 +29,8 @@ update_ca_certs:
|
||||
|
||||
{% else %}
|
||||
|
||||
workstation_trusted-ca_os_fail:
|
||||
desktop_trusted-ca_os_fail:
|
||||
test.fail_without_changes:
|
||||
- comment: 'SO Analyst Workstation can only be installed on CentOS'
|
||||
- comment: 'SO Desktop can only be installed on CentOS'
|
||||
|
||||
{% endif %}
|
||||
@@ -4,7 +4,7 @@
|
||||
{% if GLOBALS.os == 'Rocky' %}
|
||||
|
||||
include:
|
||||
- workstation.packages
|
||||
- desktop.packages
|
||||
|
||||
graphical_target:
|
||||
file.symlink:
|
||||
@@ -12,13 +12,12 @@ graphical_target:
|
||||
- target: /lib/systemd/system/graphical.target
|
||||
- force: True
|
||||
- require:
|
||||
- pkg: X Window System
|
||||
- pkg: graphical_extras
|
||||
- desktop_packages
|
||||
|
||||
{% else %}
|
||||
|
||||
workstation_xwindows_os_fail:
|
||||
desktop_xwindows_os_fail:
|
||||
test.fail_without_changes:
|
||||
- comment: 'SO Analyst Workstation can only be installed on CentOS'
|
||||
- comment: 'SO Desktop can only be installed on Rocky'
|
||||
|
||||
{% endif %}
|
||||