|
15 | 15 | from pynetflow.main import get_export_packets |
16 | 16 |
|
17 | 17 |
|
18 | | -logging.basicConfig(format='%(asctime)s | %(levelname)s | %(message)s', |
| 18 | +logging.basicConfig(format='%(asctime)s.%(msecs)03d | %(levelname)s | %(message)s', |
19 | 19 | datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG) |
20 | 20 | logging.addLevelName(logging.DEBUG, color("DBG", 7)) |
21 | 21 | logging.addLevelName(logging.INFO, "INF") |
|
26 | 26 |
|
27 | 27 | def process_netflow(netflow_port, named_pipe_filename): |
28 | 28 | # endless loop - read netflow packets, encode them to JSON and write them to named pipe: |
29 | | - with open(named_pipe_filename, "wb", 0) as fp: |
30 | | - for ts, client, export in get_export_packets('0.0.0.0', NETFLOW_PORT): |
31 | | - entry = { |
32 | | - "ts": ts, |
33 | | - "client": client, |
34 | | - "version": export.header.version, |
35 | | - "flows": [flow.data for flow in export.flows], |
36 | | - } |
37 | | - line = json.dumps(entry).encode() + b'\n' |
38 | | - fp.write(line) |
| 29 | + line = None |
| 30 | + last_record_seqs = {} |
| 31 | + while True: |
| 32 | + try: |
| 33 | + with open(named_pipe_filename, "wb", 0) as fp: |
| 34 | +                # if the named pipe threw an error for some reason (BrokenPipeError), write the
| 35 | +                # line we still have in the buffer before listening for new packets:
| 36 | + if line is not None: |
| 37 | + fp.write(line) |
| 38 | + line = None |
| 39 | +                for ts, client, export in get_export_packets('0.0.0.0', netflow_port):
| 40 | + if export.header.version != 9: |
| 41 | + log.error(f"Only Netflow v9 currently supported, ignoring record (version: [{export.header.version}])") |
| 42 | + continue |
| 43 | + |
| 44 | + client_ip, _ = client |
| 45 | + |
| 46 | + # check for missing records: |
| 47 | + last_record_seq = last_record_seqs.get(client_ip) |
| 48 | + if last_record_seq is None: |
| 49 | +                        log.warning(f"Last record sequence number is not yet known, starting with {export.header.sequence}")
| 50 | +                    elif export.header.sequence != last_record_seq + 1:
| 51 | +                        log.error(f"Sequence number ({export.header.sequence}) does not follow the previous one ({last_record_seq}); some records might have been skipped")
| 52 | + last_record_seqs[client_ip] = export.header.sequence |
| 53 | + |
| 54 | + flows_data = [flow.data for flow in export.flows] |
| 55 | + entry = { |
| 56 | + "ts": ts, |
| 57 | + "client": client_ip, |
| 58 | + "flows": [{ |
| 59 | + "IN_BYTES": data["IN_BYTES"], |
| 60 | + "PROTOCOL": data["PROTOCOL"], |
| 61 | + "DIRECTION": data["DIRECTION"], |
| 62 | + "INPUT_SNMP": data["INPUT_SNMP"], |
| 63 | + "L4_DST_PORT": data["L4_DST_PORT"], |
| 64 | + "L4_SRC_PORT": data["L4_SRC_PORT"], |
| 65 | + "OUTPUT_SNMP": data["OUTPUT_SNMP"], |
| 66 | + "IPV4_DST_ADDR": data["IPV4_DST_ADDR"], |
| 67 | + "IPV4_SRC_ADDR": data["IPV4_SRC_ADDR"], |
| 68 | + } for data in flows_data], |
| 69 | + } |
| 70 | + line = json.dumps(entry).encode() + b'\n' |
| 71 | + fp.write(line) |
| 72 | + line = None |
| 73 | + except Exception as ex: |
| 74 | + log.exception(f"Exception: {str(ex)}") |
| 75 | + |
39 | 76 |
|
40 | 77 |
|
41 | 78 | if __name__ == "__main__": |
|
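For context, a minimal sketch of the consuming side of the named pipe (not part of this commit; the read_netflow_entries name is illustrative). It assumes, as the writer above does, that each entry is a single JSON document terminated by a newline:

    import json

    def read_netflow_entries(named_pipe_filename):
        # Opening a FIFO for reading blocks until a writer (process_netflow) opens it for writing.
        with open(named_pipe_filename, "r") as fp:
            for raw_line in fp:
                entry = json.loads(raw_line)
                yield entry["ts"], entry["client"], entry["flows"]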