#!python3
# Standard libs
import argparse
import asyncio
import csv
import functools
import hashlib
import logging
import multiprocessing as mp
import os
import random
import re
import shutil
import signal
import socket
import sqlite3
import string
import subprocess
import sys
import time
from pathlib import Path
from sqlite3 import Error
from sys import platform as _platform
# External libs (Mandatory)
import orjson as json
import xxhash
from colorama import Fore
from tqdm import tqdm
from tqdm.asyncio import tqdm as tqdmAsync
# External libs (Optional)
forwardingDisabled = False
try:
import aiohttp
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
except ImportError:
forwardingDisabled = True
elasticForwardingDisabled = False
try:
from elasticsearch import AsyncElasticsearch
except ImportError:
elasticForwardingDisabled = True
updateDisabled = False
try:
import requests
except ImportError:
forwardingDisabled = True
updateDisabled = True
sigmaConversionDisabled = False
try:
from sigma.collection import SigmaCollection
from sigma.backends.sqlite import sqlite
from sigma.processing.resolver import ProcessingPipelineResolver
from sigma.plugins import InstalledSigmaPlugins
import yaml
except ImportError:
sigmaConversionDisabled = True
pyevtxDisabled = False
try:
from evtx import PyEvtxParser
except ImportError:
pyevtxDisabled = True
jinja2Disabled = False
try:
from jinja2 import Template
except ImportError:
jinja2Disabled = True
xmlImportDisabled = False
try:
from lxml import etree
except ImportError:
xmlImportDisabled = True
def signal_handler(sig, frame):
print("[-] Execution interrupted !")
sys.exit(0)
def quitOnError(message, logger=None):
    logger = logger or logging.getLogger(__name__)  # Guard against a missing logger
    logger.error(message)
    sys.exit(1)
def checkIfExists(path, errorMessage, logger=None):
"""Test if path provided is a file"""
if not (Path(path).is_file()):
quitOnError(errorMessage, logger)
def initLogger(debugMode, logFile=None):
fileLogLevel = logging.INFO
fileLogFormat = "%(asctime)s %(levelname)-8s %(message)s"
if debugMode:
fileLogLevel = logging.DEBUG
fileLogFormat = "%(asctime)s %(levelname)-8s %(module)s:%(lineno)s %(funcName)s %(message)s"
if logFile is not None:
logging.basicConfig(format=fileLogFormat, filename=logFile, level=fileLogLevel, datefmt='%Y-%m-%d %H:%M:%S')
logger = logging.StreamHandler()
formatter = logging.Formatter('%(message)s')
logger.setFormatter(formatter)
logger.setLevel(logging.INFO)
logging.getLogger().addHandler(logger)
else:
logging.basicConfig(format='%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S')
return logging.getLogger()
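# Illustrative usage sketch (comment only, not executed; the log file name is hypothetical):
#   logger = initLogger(debugMode=True, logFile="zircolite.log")
#   logger.info("[+] Goes to stdout and, with debugMode, to the log file with module/line context")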
class templateEngine:
def __init__(self, logger=None, template=[], templateOutput=[], timeField=""):
self.logger = logger or logging.getLogger(__name__)
self.template = template
self.templateOutput = templateOutput
self.timeField = timeField
def generateFromTemplate(self, templateFile, outputFilename, data):
""" Use Jinja2 to output data in a specific format """
try:
tmpl = open(templateFile, 'r', encoding='utf-8')
template = Template(tmpl.read())
with open(outputFilename, 'a', encoding='utf-8') as tpl:
tpl.write(template.render(data=data, timeField=self.timeField))
except Exception as e:
self.logger.error(f"{Fore.RED} [-] Template error, activate debug mode to check for errors{Fore.RESET}")
self.logger.debug(f" [-] {e}")
def run(self, data):
for template, templateOutput in zip(self.template, self.templateOutput):
self.logger.info(f'[+] Applying template "{template[0]}", outputting to : {templateOutput[0]}')
self.generateFromTemplate(template[0], templateOutput[0], data)
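# Illustrative usage sketch (comment only; file names are hypothetical). Each template and output
# is a one-element list, matching the nested lists this class indexes with [0]:
#   engine = templateEngine(logger, template=[["templates/exportForSplunk.tmpl"]], templateOutput=[["export.json"]], timeField="SystemTime")
#   engine.run(data=fullResults)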
class eventForwarder:
""" Class for handling event forwarding """
def __init__(self, remote, timeField, token, logger=None, index=None, login='', password='', pipeline=''):
self.logger = logger or logging.getLogger(__name__)
self.remoteHost = remote
self.token = token
self.localHostname = socket.gethostname()
self.userAgent = "zircolite/2.x"
self.index = index
self.login = login
self.password = password
self.pipeline = pipeline
self.queueSize = 20
self.connectionFailed = False
self.timeField = timeField
def send(self, payloads, forwardAll=False):
if payloads:
if self.remoteHost:
try:
# Change EventLoopPolicy on Windows https://stackoverflow.com/questions/45600579/asyncio-event-loop-is-closed-when-getting-loop
if _platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
# Splunk HEC
if self.token:
asyncio.run(self.sendAllAsyncQueue(payloads, timeField=self.timeField, sigmaEvents=(not forwardAll), mode="HEC"))
# ElasticSearch
elif self.index:
self.disableESDefaultLogging()
asyncio.run(self.sendAllAsyncQueue(payloads, timeField=self.timeField, sigmaEvents=(not forwardAll), mode="ES"))
# HTTP
else:
asyncio.run(self.sendAllAsyncQueue(payloads, timeField=self.timeField, sigmaEvents=(not forwardAll), mode="HTTP"))
except Exception as e:
self.logger.debug(f"{Fore.RED} [-] {e}")
def networkCheck(self):
""" Check remote connectivity """
        self.logger.info(f'[+] Checking connectivity to {self.remoteHost}')
try:
requests.get(self.remoteHost, headers={'user-agent': self.userAgent}, timeout=10, verify=False)
except (requests.ConnectionError, requests.Timeout):
return False
return True
    def formatToEpoch(self, timestamp):
        try:
            return str(time.mktime(time.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f%z'))) + timestamp.split(".")[1][:-1]
        except ValueError:
            try:
                # No fractional seconds in the timestamp: nothing to append after the epoch value
                return str(time.mktime(time.strptime(timestamp, '%Y-%m-%dT%H:%M:%S%z')))
            except Exception:
                self.logger.debug(f"{Fore.RED} [-] Timestamp error: {timestamp}{Fore.RESET}")
def disableESDefaultLogging(self):
""" By Default Elastic client has a logger set to INFO level """
es_log = logging.getLogger("elasticsearch")
es_log.setLevel(logging.ERROR)
es_log = logging.getLogger("elastic_transport")
es_log.setLevel(logging.ERROR)
async def HECWorker(self, session, queue, sigmaEvents):
while True:
if self.index:
providedIndex = f"?index={self.index}"
else:
providedIndex = ""
data = await queue.get() # Pop data from Queue
resp = await session.post(f"{self.remoteHost}/services/collector/event{providedIndex}", headers={'Authorization': f"Splunk {self.token}"}, json=data) # Exec action from Queue
queue.task_done() # Notify Queue action ended
if str(resp.status)[0] in ["4", "5"]:
self.logger.error(f"{Fore.RED} [-] Forwarding failed for event {Fore.RESET}")
async def ESWorker(self, session, queue, sigmaEvents):
while True:
data = await queue.get() # Pop data from Queue
index = self.index
if sigmaEvents:
index = f'{self.index}-sigma'
else:
if "OriginalLogfile" in data["payload"]:
index = f'{index}-{("".join([char for char in data["payload"]["OriginalLogfile"].split(".")[0] if (char.isalpha() or char == "-")])).lower()}'
try:
await session.index(index=index, document=data["payload"], id=data["hash"]) # Exec action from Queue
except Exception as e:
if "error" in e.body:
if e.body["error"]["type"] == "mapper_parsing_exception":
errField = e.body["error"]["reason"].split("[")[1].split("]")[0]
errType = e.body["error"]["reason"].split("[")[2].split("]")[0]
errValue = e.body["error"]["reason"].split("value: '")[1].split("'")[0]
canInsert = False
if errType == "long" and errValue.startswith("0x"): # Hex value in long field
data["payload"][errField] = int(data["payload"][errField], 16)
canInsert = True
elif errType == "boolean" and errValue.startswith("0"): # 0 value in bool field
data["payload"][errField] = "false"
canInsert = True
elif errType == "boolean" and errValue.startswith("1"): # 1 value in bool field
data["payload"][errField] = "true"
canInsert = True
elif errType == "long" and isinstance((data["payload"][errField]), int) and data["payload"][errField] > (2**63 -1): # ES limit
data["payload"][errField] = 2 ** 63 - 1
canInsert = True
elif errType == "long" and isinstance((data["payload"][errField]), int) and data["payload"][errField] < -(2**63): # ES limit
data["payload"][errField] = -(2 ** 63)
canInsert = True
elif errType == "long" and isinstance(data["payload"][errField], argparse.BooleanOptionalAction):
if type(data["payload"][errField]):
data["payload"][errField] = 1
else:
data["payload"][errField] = 0
canInsert = True
else:
self.logger.debug(f"{Fore.RED} [-] ES Mapping parser error : {e}{Fore.RESET}")
if canInsert:
try:
await session.index(index=index, document=data["payload"], id=data["hash"])
except Exception as e:
self.logger.debug(f"{Fore.RED} [-] ES error : {e}{Fore.RESET}")
elif e.body["error"]["type"] == "illegal_argument_exception":
errField = e.body["error"]["reason"].split("[")[1].split("]")[0]
data["payload"].pop(errField, None) # remove value from payload
try:
await session.index(index=index, document=data["payload"], id=data["hash"])
except Exception as e:
self.logger.debug(f"{Fore.RED} [-] ES error : {e}{Fore.RESET}")
else:
self.logger.debug(f"{Fore.RED} [-] ES error : {e}{Fore.RESET}")
queue.task_done() # Notify Queue action ended
async def HTTPWorker(self, session, queue, sigmaEvents):
while True:
data = await queue.get() # Pop data from Queue
resp = await session.post(self.remoteHost, headers={"user-agent": self.userAgent}, json=data) # Exec action from Queue
queue.task_done() # Notify Queue action ended
if str(resp.status)[0] in ["4", "5"]:
self.logger.error(f"{Fore.RED} [-] Forwarding failed for event {Fore.RESET}")
def formatEventForES(self, payload, match={}, timeField="", sigmaEvents=False):
if self.pipeline != "":
payload["pipeline"] = self.pipeline
if sigmaEvents:
payload = {"title": payload["title"], "id": payload["id"],"sigmafile": payload["sigmafile"], "description": payload["description"], "sigma": payload["sigma"], "rule_level": payload["rule_level"], "tags": payload["tags"], "host": self.localHostname}
            for key, value in match.items():  # In detected events booleans are stored as strings
                payload[key] = (value == "True") if value in ("True", "False") else value
return {"payload": payload, "hash":xxhash.xxh64_hexdigest(str(payload))}
def formatEventForSplunk(self, payload, match={}, timeField="", sigmaEvents=False):
if sigmaEvents:
payload = {"title": payload["title"], "id": payload["id"],"sigmafile": payload["sigmafile"], "description": payload["description"], "sigma": payload["sigma"], "rule_level": payload["rule_level"], "tags": payload["tags"]}
            payload.update(match)
if (timeField == ""):
return {"sourcetype": "_json", "event": payload, "host": self.localHostname }
elif (timeField not in payload):
self.logger.error(f"{Fore.RED} [-] Provided time field was not found {Fore.RESET}")
return {"sourcetype": "_json", "event": payload, "host": self.localHostname }
else:
return {"sourcetype": "_json", "event": payload, "host": self.localHostname, "time": self.formatToEpoch(payload[timeField])}
    def formatEventForHTTP(self, payload, match={}, timeField="", sigmaEvents=False):
payload.update({"host": self.localHostname})
return payload
def initESSession(self):
if self.login == "":
session = AsyncElasticsearch(hosts=[self.remoteHost], verify_certs=False)
else:
session = AsyncElasticsearch(hosts=[self.remoteHost], verify_certs=False, basic_auth=(self.login, self.password))
return session
async def testESSession(self, session):
try:
await session.info()
except Exception:
self.logger.error(f"{Fore.RED} [-] Connection to ES failed {Fore.RESET}")
await session.close()
self.connectionFailed = True
async def testSplunkSession(self, session):
data = {"sourcetype": "_json", "event": {}, "host": self.localHostname }
resp = await session.post(f"{self.remoteHost}/services/collector/event", headers={'Authorization': f"Splunk {self.token}"}, json=data)
if str(resp.status)[0] in ["4", "5"]:
await session.close()
self.logger.error(f"{Fore.RED} [-] Connection to Splunk HEC failed - Forwarding disabled {Fore.RESET}")
self.connectionFailed = True
async def testHTTPSession(self, session):
resp = await session.post(self.remoteHost, headers={"user-agent": self.userAgent}, json={})
if str(resp.status)[0] in ["4", "5"]:
await session.close()
self.logger.error(f"{Fore.RED} [-] Connection to HTTP Server failed - Forwarding disabled {Fore.RESET}")
self.connectionFailed = True
async def sendAllAsyncQueue(self, payloads, timeField="", sigmaEvents=False, mode=""):
if self.connectionFailed:
return
if mode == "ES":
session = self.initESSession()
await self.testESSession(session)
if self.connectionFailed:
return
fnformatEvent = self.formatEventForES
fnWorker = self.ESWorker
elif mode == "HEC":
session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False))
await self.testSplunkSession(session)
if self.connectionFailed:
return
fnformatEvent = self.formatEventForSplunk
fnWorker = self.HECWorker
elif mode == "HTTP":
session = aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False))
await self.testHTTPSession(session)
if self.connectionFailed:
return
            fnformatEvent = self.formatEventForHTTP
fnWorker = self.HTTPWorker
else:
return
# Init queue
queue = asyncio.Queue()
tasks = []
if not sigmaEvents:
self.logger.info('[+] Gathering events to forward')
payloads = tqdmAsync(payloads, colour="yellow")
for payload in payloads:
if sigmaEvents:
for match in payload["matches"]:
queue.put_nowait(fnformatEvent(payload=payload, match=match, timeField=timeField, sigmaEvents=sigmaEvents))
else:
queue.put_nowait(fnformatEvent(payload=payload, timeField=timeField, sigmaEvents=sigmaEvents))
# Create workers to process Queue
        for i in range(self.queueSize):  # Worker count, matches self.queueSize defined in __init__
task = asyncio.create_task(fnWorker(session, queue, sigmaEvents=sigmaEvents))
tasks.append(task)
if not sigmaEvents:
            self.logger.info(f'[+] Forwarding {queue.qsize()} events to {self.remoteHost} {Fore.CYAN}(Don\'t panic if nothing changes for a long time){Fore.RESET}')
await queue.join()
# Cancel our worker tasks.
for task in tasks:
task.cancel()
# Wait until all worker tasks are cancelled.
await asyncio.gather(*tasks, return_exceptions=True)
await session.close()
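# Illustrative usage sketch (comment only; host and token are hypothetical):
#   forwarder = eventForwarder(remote="https://splunk.example:8088", timeField="SystemTime", token="xxxx")
#   forwarder.send(payloads=fullResults, forwardAll=False)  # forwardAll=False forwards only Sigma matches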
class JSONFlattener:
""" Perform JSON Flattening """
def __init__(self, configFile, logger=None, timeAfter="1970-01-01T00:00:00", timeBefore="9999-12-12T23:59:59", timeField=None, hashes=False, JSONArray=False):
self.logger = logger or logging.getLogger(__name__)
self.keyDict = {}
self.fieldStmt = ""
self.valuesStmt = []
self.timeAfter = timeAfter
self.timeBefore = timeBefore
self.timeField = timeField
self.hashes = hashes
self.JSONArray = JSONArray
with open(configFile, 'r', encoding='UTF-8') as fieldMappingsFile:
self.fieldMappingsDict = json.loads(fieldMappingsFile.read())
self.fieldExclusions = self.fieldMappingsDict["exclusions"]
self.fieldMappings = self.fieldMappingsDict["mappings"]
self.uselessValues = self.fieldMappingsDict["useless"]
self.aliases = self.fieldMappingsDict["alias"]
self.fieldSplitList = self.fieldMappingsDict["split"]
def run(self, file):
"""
Flatten json object with nested keys into a single level.
Returns the flattened json object
"""
self.logger.debug(f"FLATTENING : {file}")
JSONLine = {}
JSONOutput = []
fieldStmt = ""
def flatten(x, name=''):
nonlocal fieldStmt
# If it is a Dict go deeper
if isinstance(x, dict):
for a in x:
flatten(x[a], name + a + '.')
else:
# Applying exclusions. Be careful, the key/value pair is discarded if there is a partial match
if not any(exclusion in name[:-1] for exclusion in self.fieldExclusions):
# Arrays are not expanded
if isinstance(x, list):
                        value = str(x)
else:
value = x
# Excluding useless values (e.g. "null"). The value must be an exact match.
if value not in self.uselessValues:
# Applying field mappings
rawFieldName = name[:-1]
if rawFieldName in self.fieldMappings:
key = self.fieldMappings[rawFieldName]
else:
                            # Removing all annoying characters from the field name
key = ''.join(e for e in rawFieldName.split(".")[-1] if e.isalnum())
# Preparing aliases
keys = [key]
if key in self.aliases:
keys.append(self.aliases[key])
if rawFieldName in self.aliases:
keys.append(self.aliases[rawFieldName])
# Applying field splitting
fieldsToSplit = []
if rawFieldName in self.fieldSplitList:
fieldsToSplit.append(rawFieldName)
if key in self.fieldSplitList:
fieldsToSplit.append(key)
if len(fieldsToSplit) > 0:
for field in fieldsToSplit:
try:
splittedFields = value.split(self.fieldSplitList[field]["separator"])
for splittedField in splittedFields:
k,v = splittedField.split(self.fieldSplitList[field]["equal"])
keyLower = k.lower()
JSONLine[k] = v
if keyLower not in self.keyDict:
self.keyDict[keyLower] = k
fieldStmt += f"'{k}' TEXT COLLATE NOCASE,\n"
except Exception as e:
self.logger.debug(f"ERROR : Couldn't apply field splitting, value(s) {str(splittedFields)} : {e}")
# Applying aliases
for key in keys:
JSONLine[key] = value
# Creating the CREATE TABLE SQL statement
                            keyLower = key.lower()
if keyLower not in self.keyDict:
self.keyDict[keyLower] = key
if isinstance(value, int):
fieldStmt += f"'{key}' INTEGER,\n"
else:
fieldStmt += f"'{key}' TEXT COLLATE NOCASE,\n"
# If filesize is not zero
if os.stat(file).st_size != 0:
with open(str(file), 'r', encoding='utf-8') as JSONFile:
filename = os.path.basename(file)
logs = JSONFile
# If the file is a json array
if self.JSONArray:
try:
logs = json.loads(JSONFile.read())
except Exception as e:
self.logger.debug(f'JSON ARRAY ERROR : {e}')
logs = []
for line in logs:
try:
if self.JSONArray:
dictToFlatten = line
else:
dictToFlatten = json.loads(line)
dictToFlatten.update({"OriginalLogfile": filename})
if self.hashes:
dictToFlatten.update({"OriginalLogLinexxHash": xxhash.xxh64_hexdigest(line[:-1])})
flatten(dictToFlatten)
except Exception as e:
self.logger.debug(f'JSON ERROR : {e}')
# Handle timestamp filters
if (self.timeAfter != "1970-01-01T00:00:00" or self.timeBefore != "9999-12-12T23:59:59") and (self.timeField in JSONLine):
try:
timestamp = time.strptime(JSONLine[self.timeField].split(".")[0].replace("Z",""), '%Y-%m-%dT%H:%M:%S')
if timestamp > self.timeAfter and timestamp < self.timeBefore:
JSONOutput.append(JSONLine)
except Exception:
JSONOutput.append(JSONLine)
else:
JSONOutput.append(JSONLine)
JSONLine = {}
return {"dbFields": fieldStmt, "dbValues": JSONOutput}
def runAll(self, EVTXJSONList):
for evtxJSON in tqdm(EVTXJSONList, colour="yellow"):
if os.stat(evtxJSON).st_size != 0:
results = self.run(evtxJSON)
self.fieldStmt += results["dbFields"]
self.valuesStmt += results["dbValues"]
class zirCore:
""" Load data into database and apply detection rules """
def __init__(self, config, logger=None, noOutput=False, timeAfter="1970-01-01T00:00:00", timeBefore="9999-12-12T23:59:59", limit=-1, csvMode=False, timeField=None, hashes=False, dbLocation=":memory:", delimiter=";"):
self.logger = logger or logging.getLogger(__name__)
self.dbConnection = self.createConnection(dbLocation)
self.fullResults = []
self.ruleset = {}
self.noOutput = noOutput
self.timeAfter = timeAfter
self.timeBefore = timeBefore
self.config = config
self.limit = limit
self.csvMode = csvMode
self.timeField = timeField
self.hashes = hashes
self.delimiter = delimiter
def close(self):
self.dbConnection.close()
def createConnection(self, db):
""" create a database connection to a SQLite database """
conn = None
self.logger.debug(f"CONNECTING TO : {db}")
try:
conn = sqlite3.connect(db)
            conn.row_factory = sqlite3.Row  # Allows fetching rows as dict-like objects
def udf_regex(x, y):
if y is None:
return 0
if re.search(x, y):
return 1
else:
return 0
            conn.create_function('regexp', 2, udf_regex)  # Allows using regex in SQLite
except Error as e:
self.logger.error(f"{Fore.RED} [-] {e}")
return conn
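    # Illustrative sketch: the registered UDF lets generated queries use REGEXP, e.g. (hypothetical
    # rule output) SELECT * FROM logs WHERE commandline REGEXP '(?i)powershell.*-enc';
    # SQLite rewrites "value REGEXP pattern" as regexp(pattern, value), matching udf_regex(x, y).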
def createDb(self, fieldStmt):
createTableStmt = f"CREATE TABLE logs ( row_id INTEGER, {fieldStmt} PRIMARY KEY(row_id AUTOINCREMENT) );"
self.logger.debug(" CREATE : " + createTableStmt.replace('\n', ' ').replace('\r', ''))
if not self.executeQuery(createTableStmt):
self.logger.error(f"{Fore.RED} [-] Unable to create table{Fore.RESET}")
sys.exit(1)
def createIndex(self):
self.executeQuery('CREATE INDEX "idx_eventid" ON "logs" ("eventid");')
def executeQuery(self, query):
""" Perform a SQL Query with the provided connection """
if self.dbConnection is not None:
dbHandle = self.dbConnection.cursor()
self.logger.debug(f"EXECUTING : {query}")
try:
dbHandle.execute(query)
self.dbConnection.commit()
return True
except Error as e:
self.logger.debug(f" [-] {e}")
return False
else:
self.logger.error(f"{Fore.RED} [-] No connection to Db{Fore.RESET}")
return False
def executeSelectQuery(self, query):
""" Perform a SQL Query -SELECT only- with the provided connection """
if self.dbConnection is not None:
dbHandle = self.dbConnection.cursor()
self.logger.debug(f"EXECUTING : {query}")
try:
data = dbHandle.execute(query)
return data
except Error as e:
self.logger.debug(f" [-] {e}")
return {}
else:
self.logger.error(f"{Fore.RED} [-] No connection to Db{Fore.RESET}")
return {}
def loadDbInMemory(self, db):
""" In db only mode it is possible to restore an on disk Db to avoid EVTX extraction and flattening """
dbfileConnection = self.createConnection(db)
dbfileConnection.backup(self.dbConnection)
dbfileConnection.close()
def insertData2Db(self, JSONLine):
""" Build INSERT INTO Query and insert data into Db """
columnsStr = ""
valuesStr = ""
for key in sorted(JSONLine.keys()):
columnsStr += "'" + key + "',"
if isinstance(JSONLine[key], int):
valuesStr += str(JSONLine[key]) + ", "
else:
valuesStr += "'" + str(JSONLine[key]).replace("'", "''") + "', "
insertStrmt = f"INSERT INTO logs ({columnsStr[:-1]}) VALUES ({valuesStr[:-2]});"
return self.executeQuery(insertStrmt)
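    # Illustrative sketch of a generated statement (hypothetical line):
    #   {"EventID": 1, "Image": "C:\\Windows\\explorer.exe"} becomes
    #   INSERT INTO logs ('EventID','Image') VALUES (1, 'C:\Windows\explorer.exe');
    # Single quotes in values are doubled ('') to keep the statement valid.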
def insertFlattenedJSON2Db(self, flattenedJSON, forwarder=None):
if forwarder:
forwarder.send(flattenedJSON, forwardAll=True)
for JSONLine in tqdm(flattenedJSON, colour="yellow"):
self.insertData2Db(JSONLine)
self.createIndex()
def saveFlattenedJSON2File(self, flattenedJSON, outputFile):
with open(outputFile, 'w', encoding='utf-8') as file:
for JSONLine in tqdm(flattenedJSON, colour="yellow"):
file.write(json.dumps(JSONLine).decode('utf-8') + '\n')
def saveDbToDisk(self, dbFilename):
self.logger.info("[+] Saving working data to disk as a SQLite DB")
onDiskDb = sqlite3.connect(dbFilename)
self.dbConnection.backup(onDiskDb)
onDiskDb.close()
def executeRule(self, rule):
results = {}
filteredRows = []
counter = 0
if "rule" in rule:
# for each SQL Query in the Sigma rule
for SQLQuery in rule["rule"]:
data = self.executeSelectQuery(SQLQuery)
if data != {}:
# Convert to array of dict
rows = [dict(row) for row in data.fetchall()]
if len(rows) > 0:
counter += len(rows)
for row in rows:
if self.csvMode: # Cleaning "annoying" values for CSV
match = {k: str(v).replace("\n","").replace("\r","").replace("None","") for k, v in row.items()}
else: # Cleaning null/None fields
match = {k: v for k, v in row.items() if v is not None}
filteredRows.append(match)
if "level" not in rule:
rule["level"] = "unknown"
if "tags" not in rule:
rule["tags"] = []
if "filename" not in rule:
rule["filename"] = ""
if self.csvMode:
results = ({"title": rule["title"], "id": rule["id"], "description": rule["description"].replace("\n","").replace("\r",""), "sigmafile": rule["filename"], "sigma": rule["rule"], "rule_level": rule["level"], "tags": rule["tags"], "count": counter, "matches": filteredRows})
else:
results = ({"title": rule["title"], "id": rule["id"], "description": rule["description"], "sigmafile": rule["filename"], "sigma": rule["rule"], "rule_level": rule["level"], "tags": rule["tags"], "count": counter, "matches": filteredRows})
if counter > 0:
self.logger.debug(f'DETECTED : {rule["title"]} - Matches : {counter} events')
else:
self.logger.debug("RULE FORMAT ERROR : rule key Missing")
if filteredRows == []:
return {}
return results
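    # Illustrative sketch of a ruleset entry as consumed above (values are hypothetical):
    #   {"title": "...", "id": "...", "description": "...", "level": "high",
    #    "tags": ["attack.t1059"], "rule": ["SELECT * FROM logs WHERE eventid = '4688'"]}
    # "rule" holds one or more SQL queries; every row they return counts as a match.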
def loadRulesetFromFile(self, filename, ruleFilters):
try:
with open(filename, encoding='utf-8') as f:
self.ruleset = json.loads(f.read())
self.applyRulesetFilters(ruleFilters)
except Exception as e:
self.logger.error(f"{Fore.RED} [-] Loading JSON ruleset failed, are you sure it is a valid JSON file ? : {e}{Fore.RESET}")
def loadRulesetFromVar(self, ruleset, ruleFilters):
self.ruleset = ruleset
self.applyRulesetFilters(ruleFilters)
def applyRulesetFilters(self, ruleFilters=None):
# Remove empty rule and remove filtered rules
self.ruleset = list(filter(None, self.ruleset))
if ruleFilters is not None:
self.ruleset = [rule for rule in self.ruleset if not any(ruleFilter in rule["title"] for ruleFilter in ruleFilters)]
def ruleLevelPrintFormatter(self, level, orgFormat=Fore.RESET):
if level == "informational":
return f'{Fore.WHITE}{level}{orgFormat}'
if level == "low":
return f'{Fore.GREEN}{level}{orgFormat}'
if level == "medium":
return f'{Fore.YELLOW}{level}{orgFormat}'
if level == "high":
return f'{Fore.MAGENTA}{level}{orgFormat}'
if level == "critical":
return f'{Fore.RED}{level}{orgFormat}'
def executeRuleset(self, outFile, writeMode='w', forwarder=None, showAll=False, KeepResults=False, remote=None, stream=False, lastRuleset=False):
csvWriter = None
# Results are written upon detection to allow analysis during execution and to avoid losing results in case of error.
with open(outFile, writeMode, encoding='utf-8', newline='') as fileHandle:
with tqdm(self.ruleset, colour="yellow") as ruleBar:
if not self.noOutput and not self.csvMode and writeMode != "a":
fileHandle.write('[')
for rule in ruleBar: # for each rule in ruleset
if showAll and "title" in rule:
                        ruleBar.write(f'{Fore.BLUE} - {rule["title"]} [{self.ruleLevelPrintFormatter(rule.get("level", "unknown"), Fore.BLUE)}]{Fore.RESET}')  # Print all rules (defaults are only set later in executeRule)
ruleResults = self.executeRule(rule)
if ruleResults != {}:
if self.limit == -1 or ruleResults["count"] <= self.limit:
ruleBar.write(f'{Fore.CYAN} - {ruleResults["title"]} [{self.ruleLevelPrintFormatter(rule["level"], Fore.CYAN)}] : {ruleResults["count"]} events{Fore.RESET}')
# Store results for templating and event forwarding (only if stream mode is disabled)
if KeepResults or (remote is not None and not stream):
self.fullResults.append(ruleResults)
if stream and forwarder is not None:
forwarder.send([ruleResults], False)
if not self.noOutput:
                                # Also write the match to the log file without printing it twice on stdout (the stream handler stays at INFO)
logLevel = self.logger.getEffectiveLevel()
self.logger.setLevel(logging.DEBUG)
self.logger.debug(f' - {ruleResults["title"]} [{ruleResults["rule_level"]}] : {ruleResults["count"]} events')
self.logger.setLevel(logLevel)
# Output to json or csv file
if self.csvMode:
if not csvWriter: # Creating the CSV header and the fields ("agg" is for queries with aggregation)
csvWriter = csv.DictWriter(fileHandle, delimiter=self.delimiter, fieldnames=["rule_title", "rule_description", "rule_level", "rule_count", "agg"] + list(ruleResults["matches"][0].keys()))
csvWriter.writeheader()
for data in ruleResults["matches"]:
dictCSV = { "rule_title": ruleResults["title"], "rule_description": ruleResults["description"], "rule_level": ruleResults["rule_level"], "rule_count": ruleResults["count"], **data}
csvWriter.writerow(dictCSV)
else:
try:
fileHandle.write(json.dumps(ruleResults, option=json.OPT_INDENT_2).decode('utf-8'))
fileHandle.write(',\n')
except Exception as e:
self.logger.error(f"{Fore.RED} [-] Error saving some results : {e}{Fore.RESET}")
if (not self.noOutput and not self.csvMode) and lastRuleset:
fileHandle.write('{}]') # Added to produce a valid JSON Array
def run(self, EVTXJSONList, Insert2Db=True, saveToFile=False, forwarder=None, JSONArray=False):
self.logger.info("[+] Processing events")
flattener = JSONFlattener(configFile=self.config, timeAfter=self.timeAfter, timeBefore=self.timeBefore, timeField=self.timeField, hashes=self.hashes, JSONArray=JSONArray)
flattener.runAll(EVTXJSONList)
if saveToFile:
filename = f"flattened_events_{''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(4))}.json"
self.logger.info(f"[+] Saving flattened JSON to : {filename}")
self.saveFlattenedJSON2File(flattener.valuesStmt, filename)
if Insert2Db:
self.logger.info("[+] Creating model")
self.createDb(flattener.fieldStmt)
self.logger.info("[+] Inserting data")
self.insertFlattenedJSON2Db(flattener.valuesStmt, forwarder)
self.logger.info("[+] Cleaning unused objects")
else:
return flattener.keyDict
del flattener
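# Illustrative end-to-end sketch (comment only; paths are hypothetical):
#   core = zirCore(config="config/fieldMappings.json", logger=logger, timeField="SystemTime")
#   core.run(["tmp-ABCD1234/Security.evtx.json"], Insert2Db=True)
#   core.loadRulesetFromFile("rules/rules_windows_generic.json", ruleFilters=None)
#   core.executeRuleset("detected_events.json", lastRuleset=True)
#   core.close()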
class evtxExtractor:
def __init__(self, logger=None, providedTmpDir=None, coreCount=None, useExternalBinaries=True, binPath = None, xmlLogs=False, sysmon4linux=False, auditdLogs=False, encoding=None, evtxtract=False, csvInput=False):
self.logger = logger or logging.getLogger(__name__)
if Path(str(providedTmpDir)).is_dir():
self.tmpDir = f"tmp-{self.randString()}"
self.logger.error(f"{Fore.RED} [-] Provided directory already exists using '{self.tmpDir}' instead{Fore.RESET}")
else:
self.tmpDir = providedTmpDir or f"tmp-{self.randString()}"
os.mkdir(self.tmpDir)
self.cores = coreCount or os.cpu_count()
self.useExternalBinaries = useExternalBinaries
self.sysmon4linux = sysmon4linux
self.xmlLogs = xmlLogs
self.auditdLogs = auditdLogs
self.evtxtract = evtxtract
self.csvInput = csvInput
# Hardcoded hash list of evtx_dump binaries
self.validHashList = ["bbcce464533e0364", "e642f5c23e156deb", "5a7a1005885a1a11"]
# Sysmon 4 Linux default encoding is ISO-8859-1, Auditd is UTF-8
if not encoding and sysmon4linux:
self.encoding = "ISO-8859-1"
elif not encoding and (auditdLogs or evtxtract or xmlLogs):
self.encoding = "utf-8"
else:
self.encoding = encoding
self.evtxDumpCmd = self.getOSExternalTools(binPath)
def randString(self):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(8))
def getOSExternalTools(self, binPath):
""" Determine which binaries to run depending on host OS : 32Bits is NOT supported for now since evtx_dump is 64bits only"""
if binPath is None:
if _platform == "linux" or _platform == "linux2":
return "bin/evtx_dump_lin"
elif _platform == "darwin":
return "bin/evtx_dump_mac"
elif _platform == "win32":
return "bin\\evtx_dump_win.exe"
else:
return binPath
def runUsingBindings(self, file):
"""
Convert EVTX to JSON using evtx_dump bindings (slower)
Drop resulting JSON files in a tmp folder.
"""
if not self.useExternalBinaries:
try:
filepath = Path(file)
filename = filepath.name
parser = PyEvtxParser(str(filepath))
with open(f"{self.tmpDir}/{str(filename)}-{self.randString()}.json", "w", encoding="utf-8") as f:
for record in parser.records_json():
f.write(f'{json.dumps(json.loads(record["data"])).decode("utf-8")}\n')
except Exception as e:
self.logger.error(f"{Fore.RED} [-] Cannot use PyEvtxParser{Fore.RESET}")
else:
self.logger.error(f"{Fore.RED} [-] Cannot use PyEvtxParser and evtx_dump is disabled or missing{Fore.RESET}")
def getTime(self, line):
timestamp = line.replace('msg=audit(','').replace('):','').split(':')
timestamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(timestamp[0])))
return timestamp
def auditdLine2JSON(self, auditdLine):
"""
Convert auditd logs to JSON : code from https://github.com/csark/audit2json
"""
event = {}
# According to auditd specs https://github.com/linux-audit/audit-documentation/wiki/SPEC-Audit-Event-Enrichment
# a GS ASCII character, 0x1D, will be inserted to separate original and translated fields
# Best way to deal with it is to remove it.
attributes = auditdLine.replace('\x1d',' ').split(' ')
for attribute in attributes:
if 'msg=audit' in attribute:
event['timestamp'] = self.getTime(attribute)
else:
try:
attribute = attribute.replace('msg=','').replace('\'','').replace('"','').split('=')
if 'cmd' in attribute[0] or 'proctitle' in attribute[0]:
attribute[1] = str(bytearray.fromhex(attribute[1]).decode()).replace('\x00',' ')
event[attribute[0]] = attribute[1].rstrip()
except Exception:
pass
if "host" not in event:
event['host'] = 'offline'
return event
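    # Illustrative sketch (hypothetical line):
    #   'type=SYSCALL msg=audit(1619870400.123:456): arch=c000003e success=yes'
    # becomes {"type": "SYSCALL", "timestamp": "2021-05-01 ...", "arch": "c000003e",
    #          "success": "yes", "host": "offline"}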
def SysmonXMLLine2JSON(self, xmlLine):
"""
Remove syslog header and convert xml data to json : code from ZikyHD (https://github.com/ZikyHD)
"""
if 'Event' not in xmlLine:
return None
xmlLine = "<Event>" + xmlLine.split("<Event>")[1]
try: # isolate individual line parsing errors
root = etree.fromstring(xmlLine)
return self.xml2dict(root)
except Exception as ex:
self.logger.debug(f"Unable to parse line \"{xmlLine}\": {ex}")
return None
def XMLLine2JSON(self, xmlLine):
"""
Remove "Events" header and convert xml data to json : code from ZikyHD (https://github.com/ZikyHD)
"""
if '<Event ' not in xmlLine:
return None
try: # isolate individual line parsing errors
root = etree.fromstring(xmlLine)
return self.xml2dict(root, u'{http://schemas.microsoft.com/win/2004/08/events/event}')
except Exception as ex:
self.logger.debug(f"Unable to parse line \"{xmlLine}\": {ex}")
return None
def xml2dict(self, eventRoot, ns=u'http://schemas.microsoft.com/win/2004/08/events/event'):
def cleanTag(tag, ns):
if ns in tag:
return tag[len(ns):]
return tag
child = {"#attributes": {"xmlns": ns}}
        for appt in eventRoot:  # Iterate directly; getchildren() is deprecated in lxml
nodename = cleanTag(appt.tag,ns)
nodevalue = {}
            for elem in appt:
cleanedTag = cleanTag(elem.tag,ns)
if not elem.text:
text = ""
else:
try:
text = int(elem.text)
except Exception:
text = elem.text
if cleanedTag == 'Data':
childnode = elem.get("Name")
                elif cleanedTag == 'Qualifiers':
                    text = elem.text
                    childnode = cleanedTag  # Ensure 'childnode' is always bound, even if 'Qualifiers' comes first
else:
childnode = cleanedTag
if elem.attrib:
text = {"#attributes": dict(elem.attrib)}
obj={str(childnode):text}
nodevalue = {**nodevalue, **obj}
node = {str(nodename): nodevalue}
child = {**child, **node}
event = { "Event": child }
return event
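    # Illustrative sketch (hypothetical event): an element like
    #   <Event><System><EventID>7</EventID></System><EventData><Data Name="Image">x.exe</Data></EventData></Event>
    # becomes {"Event": {"#attributes": {"xmlns": ...}, "System": {"EventID": 7},
    #          "EventData": {"Image": "x.exe"}}}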
def Logs2JSON(self, func, datasource, outfile, isFile=True):
"""
Use multiprocessing to convert supported log formats to JSON
"""
if isFile:
with open(datasource, "r", encoding=self.encoding) as fp:
data = fp.readlines()
        else:
data = datasource.split("\n")
pool = mp.Pool(self.cores)
result = pool.map(func, data)
pool.close()
pool.join()
with open(outfile, "w", encoding="UTF-8") as fp:
for element in result:
if element is not None:
fp.write(json.dumps(element).decode("utf-8") + '\n')
def csv2JSON(self, CSVPath, JSONPath):
"""
Convert CSV Logs to JSON
"""
with open(CSVPath, encoding='utf-8') as CSVFile:
csvReader = csv.DictReader(CSVFile)
with open(JSONPath, 'w', encoding='utf-8') as JSONFile:
for row in csvReader:
JSONFile.write(json.dumps(row).decode("utf-8") + '\n')
def evtxtract2JSON(self, file, outfile):
"""
        Convert EVTXtract logs to JSON using xml2dict and "dumps" it to a file
"""
# Load file as a string to add enclosing document since XML doesn't support multiple documents
with open(file, "r", encoding=self.encoding) as fp:
data = fp.read()
# Remove all non UTF-8 characters
data = bytes(data.replace('\x00','').replace('\x0B',''), 'utf-8').decode('utf-8', 'ignore')
data = f'<evtxtract>\n{data}\n</evtxtract>'
# Load the XML file
parser = etree.XMLParser(recover=True) # Recover=True allows the parser to ignore bad characters
root = etree.fromstring(data, parser=parser)
with open(outfile, "w", encoding="UTF-8") as fp:
            for event in root:
if "Event" in event.tag:
extractedEvent = self.xml2dict(event, u'{http://schemas.microsoft.com/win/2004/08/events/event}')
fp.write(json.dumps(extractedEvent).decode("utf-8") + '\n')
def verifyBinHash(self, binPath):
"""
Verify the hash of a binary (Hashes are hardcoded)
"""
hasher = xxhash.xxh64()
try:
# Open the file in binary mode and read chunks to hash
with open(binPath, 'rb') as f:
while chunk := f.read(4096): # Read chunks of 4096 bytes
hasher.update(chunk) # Update the hash with the chunk
if hasher.hexdigest() in self.validHashList:
return True
except Exception as e:
self.logger.error(f"{Fore.RED} [-] {e}{Fore.RESET}")
return False
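    # Illustrative usage sketch (comment only; the path is hypothetical). Chunked reads keep memory
    # flat even for large binaries, and the xxh64 digest is compared to the hardcoded allow-list:
    #   if not extractor.verifyBinHash("bin/evtx_dump_lin"):
    #       logger.error("[-] evtx_dump binary hash mismatch")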