code push
This commit is contained in:
162
fgex-lib/firegex/nfproxy/internals/__init__.py
Normal file
162
fgex-lib/firegex/nfproxy/internals/__init__.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from inspect import signature
|
||||
from firegex.nfproxy.internals.models import Action, FullStreamAction
|
||||
from firegex.nfproxy.internals.models import FilterHandler, PacketHandlerResult
|
||||
import functools
|
||||
from firegex.nfproxy.internals.data import DataStreamCtx
|
||||
from firegex.nfproxy.internals.exceptions import NotReadyToRun
|
||||
from firegex.nfproxy.internals.data import RawPacket
|
||||
|
||||
def context_call(glob, func, *args, **kargs):
    """Call *func* with *args*/*kargs* evaluated inside the *glob* namespace.

    The callable and its arguments are temporarily injected into *glob* so the
    call executes with *glob* as its global namespace (matching how the user
    filter code was exec'd there), then the temporary keys are removed.

    Returns whatever *func* returns; any exception from *func* propagates.
    """
    glob["__firegex_tmp_args"] = args
    glob["__firegex_tmp_kargs"] = kargs
    # Fixed typo: was "__firege_tmp_call" (worked only because it was consistent).
    glob["__firegex_tmp_call"] = func
    try:
        return eval("__firegex_tmp_call(*__firegex_tmp_args, **__firegex_tmp_kargs)", glob, glob)
    finally:
        # Always remove the temporary bindings, even if the call raises, so
        # user filter code never observes (or leaks) these helper names.
        del glob["__firegex_tmp_args"]
        del glob["__firegex_tmp_kargs"]
        del glob["__firegex_tmp_call"]
|
||||
|
||||
def generate_filter_structure(filters: list[str], proto: str, glob: dict) -> list[FilterHandler]:
    """Resolve filter names from *glob* into FilterHandler descriptors.

    For each name in *filters*, looks up the callable in *glob*, inspects its
    signature and maps every parameter annotation to the data-extraction
    callable registered for *proto* in ``type_annotations_associations``.

    Raises Exception for an unknown protocol, a non-string filter name, a
    missing filter, a non-callable entry, or an unsupported annotation.
    """
    from firegex.nfproxy.models import type_annotations_associations
    if proto not in type_annotations_associations:
        raise Exception("Invalid protocol")
    valid_annotation_type = type_annotations_associations[proto]
    res: list[FilterHandler] = []

    def add_func_to_list(func):
        # Build the {annotation type -> extraction callable} map for one filter.
        if not callable(func):
            raise Exception(f"{func} is not a function")
        params_function = {}
        for param in signature(func).parameters.values():
            if param.annotation not in valid_annotation_type:
                raise Exception(f"Invalid type annotation {param.annotation} for function {func.__name__}")
            params_function[param.annotation] = valid_annotation_type[param.annotation]
        res.append(
            FilterHandler(
                func=func,
                name=func.__name__,
                params=params_function,
                proto=proto
            )
        )

    # "filter_name" avoids shadowing the builtin `filter`.
    for filter_name in filters:
        if not isinstance(filter_name, str):
            raise Exception("Invalid filter list: must be a list of strings")
        if filter_name not in glob:
            raise Exception(f"Filter {filter_name} not found")
        add_func_to_list(glob[filter_name])
    return res
|
||||
|
||||
def get_filters_info(code: str, proto: str) -> list[FilterHandler]:
    """Execute user filter *code* and return its FilterHandler list for *proto*.

    The code runs in a fresh namespace; the pyfilter registry is always
    cleared afterwards, even when building the filter structure fails.
    """
    namespace = {}
    exec(code, namespace, namespace)
    exec("import firegex.nfproxy", namespace, namespace)
    registered = eval("firegex.nfproxy.get_pyfilters()", namespace, namespace)
    try:
        return generate_filter_structure(registered, proto, namespace)
    finally:
        exec("firegex.nfproxy.clear_pyfilter_registry()", namespace, namespace)
|
||||
|
||||
|
||||
def get_filter_names(code: str, proto: str) -> list[str]:
    """Return only the names of the filters defined by *code* for *proto*."""
    return [handler.name for handler in get_filters_info(code, proto)]
|
||||
|
||||
def handle_packet(glob: dict) -> None:
|
||||
internal_data = DataStreamCtx(glob)
|
||||
print("I'm here", flush=True)
|
||||
cache_call = {} # Cache of the data handler calls
|
||||
|
||||
pkt_info = RawPacket._fetch_packet(internal_data)
|
||||
internal_data.current_pkt = pkt_info
|
||||
cache_call[RawPacket] = pkt_info
|
||||
|
||||
final_result = Action.ACCEPT
|
||||
data_size = len(pkt_info.data)
|
||||
|
||||
result = PacketHandlerResult(glob)
|
||||
|
||||
if internal_data.stream_size+data_size > internal_data.stream_max_size:
|
||||
match internal_data.full_stream_action:
|
||||
case FullStreamAction.FLUSH:
|
||||
internal_data.stream = []
|
||||
internal_data.stream_size = 0
|
||||
for func in internal_data.flush_action_set:
|
||||
func()
|
||||
case FullStreamAction.ACCEPT:
|
||||
result.action = Action.ACCEPT
|
||||
return result.set_result()
|
||||
case FullStreamAction.REJECT:
|
||||
result.action = Action.REJECT
|
||||
result.matched_by = "@MAX_STREAM_SIZE_REACHED"
|
||||
return result.set_result()
|
||||
case FullStreamAction.REJECT:
|
||||
result.action = Action.DROP
|
||||
result.matched_by = "@MAX_STREAM_SIZE_REACHED"
|
||||
return result.set_result()
|
||||
|
||||
internal_data.stream.append(pkt_info)
|
||||
internal_data.stream_size += data_size
|
||||
|
||||
func_name = None
|
||||
mangled_packet = None
|
||||
for filter in internal_data.filter_call_info:
|
||||
final_params = []
|
||||
skip_call = False
|
||||
for data_type, data_func in filter.params.items():
|
||||
if data_type not in cache_call.keys():
|
||||
try:
|
||||
cache_call[data_type] = data_func(internal_data)
|
||||
except NotReadyToRun:
|
||||
cache_call[data_type] = None
|
||||
skip_call = True
|
||||
break
|
||||
final_params.append(cache_call[data_type])
|
||||
if skip_call:
|
||||
continue
|
||||
res = context_call(glob, filter.func, *final_params)
|
||||
|
||||
if res is None:
|
||||
continue #ACCEPTED
|
||||
if not isinstance(res, Action):
|
||||
raise Exception(f"Invalid return type {type(res)} for function {filter.name}")
|
||||
if res == Action.MANGLE:
|
||||
mangled_packet = pkt_info.raw_packet
|
||||
if res != Action.ACCEPT:
|
||||
func_name = filter.name
|
||||
final_result = res
|
||||
break
|
||||
|
||||
result.action = final_result
|
||||
result.matched_by = func_name
|
||||
result.mangled_packet = mangled_packet
|
||||
|
||||
return result.set_result()
|
||||
|
||||
|
||||
def compile(glob: dict) -> None:
    """Prepare the filter context inside *glob* before any packet is handled.

    Reads ``__firegex_pyfilter_enabled`` and ``__firegex_proto`` (set by the
    caller), builds the FilterHandler list, applies the optional
    ``FGEX_STREAM_MAX_SIZE`` / ``FGEX_FULL_STREAM_ACTION`` overrides and
    resets the packet-result slot.

    NOTE(review): this intentionally keeps the name ``compile`` (shadowing the
    builtin) — presumably it is looked up by name by the embedding core, so
    renaming would break callers; confirm before changing.
    """
    internal_data = DataStreamCtx(glob)

    # Make user print() always flush so output reaches the core immediately.
    glob["print"] = functools.partial(print, flush=True)

    filters = glob["__firegex_pyfilter_enabled"]
    proto = glob["__firegex_proto"]

    internal_data.filter_call_info = generate_filter_structure(filters, proto, glob)

    # Convert once instead of twice; missing/zero/negative falls to default.
    max_size = int(glob.get("FGEX_STREAM_MAX_SIZE", 0))
    if max_size > 0:
        internal_data.stream_max_size = max_size
    else:
        # Default kept as-is for compatibility. NOTE: 8e20 is effectively
        # "unlimited" — the original comment claimed 1MB, which this is not.
        internal_data.stream_max_size = 1*8e20

    full_action = glob.get("FGEX_FULL_STREAM_ACTION")
    if isinstance(full_action, FullStreamAction):
        internal_data.full_stream_action = full_action
    else:
        internal_data.full_stream_action = FullStreamAction.FLUSH

    PacketHandlerResult(glob).reset_result()
|
||||
|
||||
190
fgex-lib/firegex/nfproxy/internals/data.py
Normal file
190
fgex-lib/firegex/nfproxy/internals/data.py
Normal file
@@ -0,0 +1,190 @@
|
||||
from firegex.nfproxy.internals.models import FilterHandler
|
||||
from typing import Callable
|
||||
|
||||
class RawPacket:
    """Python-side representation of the nfqueue packet sent in this context by the C++ core.

    ``raw_packet`` holds the full on-wire bytes; the first
    ``len(raw_packet) - l4_size`` bytes are treated as protocol headers and
    the remainder as L4 payload. ``data`` is the payload snapshot taken at
    construction time and is NOT updated when the packet is mangled through
    the ``l4_data`` / ``raw_packet`` setters.
    """

    def __init__(self,
            data: bytes,          # application payload as delivered by the core
            raw_packet: bytes,    # full packet bytes (headers + L4 payload)
            is_input: bool,       # direction flag set by the core
            is_ipv6: bool,        # address-family flag set by the core
            is_tcp: bool,         # transport flag set by the core
            l4_size: int,         # size of the L4 payload inside raw_packet
        ):
        # Defensive casts: the core may hand over buffer-like objects.
        self.__data = bytes(data)
        self.__raw_packet = bytes(raw_packet)
        self.__is_input = bool(is_input)
        self.__is_ipv6 = bool(is_ipv6)
        self.__is_tcp = bool(is_tcp)
        self.__l4_size = int(l4_size)
        # Header length is computed once and treated as fixed by the setters below.
        self.__raw_packet_header_size = len(self.__raw_packet)-self.__l4_size

    @property
    def is_input(self) -> bool:
        """Direction flag as provided by the core."""
        return self.__is_input

    @property
    def is_ipv6(self) -> bool:
        """True when the packet was flagged as IPv6 by the core."""
        return self.__is_ipv6

    @property
    def is_tcp(self) -> bool:
        """True when the packet was flagged as TCP by the core."""
        return self.__is_tcp

    @property
    def data(self) -> bytes:
        """Payload snapshot taken at construction (not affected by mangling)."""
        return self.__data

    @property
    def l4_size(self) -> int:
        """Current size of the L4 payload portion of ``raw_packet``."""
        return self.__l4_size

    @property
    def raw_packet_header_len(self) -> int:
        """Length of the header portion of ``raw_packet`` (fixed at construction)."""
        return self.__raw_packet_header_size

    @property
    def l4_data(self) -> bytes:
        """The L4 payload slice of the (possibly mangled) ``raw_packet``."""
        return self.__raw_packet[self.raw_packet_header_len:]

    @l4_data.setter
    def l4_data(self, v:bytes):
        # Replaces only the payload portion; headers are kept unchanged.
        if not isinstance(v, bytes):
            raise Exception("Invalid data type, data MUST be of type bytes")
        #if len(v) != self.__l4_size:
        #    raise Exception("Invalid data size, must be equal to the original packet header size (due to a technical limitation)")
        self.__raw_packet = self.__raw_packet[:self.raw_packet_header_len]+v
        self.__l4_size = len(v)

    @property
    def raw_packet(self) -> bytes:
        """The full (possibly mangled) packet bytes."""
        return self.__raw_packet

    @raw_packet.setter
    def raw_packet(self, v:bytes):
        # Replaces the whole packet; must still contain the full header region.
        if not isinstance(v, bytes):
            raise Exception("Invalid data type, data MUST be of type bytes")
        #if len(v) != len(self.__raw_packet):
        #    raise Exception("Invalid data size, must be equal to the original packet size (due to a technical limitation)")
        if len(v) < self.raw_packet_header_len:
            raise Exception("Invalid data size, must be greater than the original packet header size")
        self.__raw_packet = v
        self.__l4_size = len(v)-self.raw_packet_header_len

    @classmethod
    def _fetch_packet(cls, internal_data):
        """Build a RawPacket from the ``__firegex_packet_info`` dict found in the filter globals.

        *internal_data* may be a DataStreamCtx or the raw globals dict.
        """
        from firegex.nfproxy.internals.data import DataStreamCtx
        if not isinstance(internal_data, DataStreamCtx):
            if isinstance(internal_data, dict):
                internal_data = DataStreamCtx(internal_data)
            else:
                raise Exception("Invalid data type, data MUST be of type DataStream, or glob dict")

        if "__firegex_packet_info" not in internal_data.filter_glob.keys():
            raise Exception("Packet info not found")
        return cls(**internal_data.filter_glob["__firegex_packet_info"])

    def __repr__(self):
        return f"RawPacket(data={self.data}, raw_packet={self.raw_packet}, is_input={self.is_input}, is_ipv6={self.is_ipv6}, is_tcp={self.is_tcp}, l4_size={self.l4_size})"
|
||||
|
||||
|
||||
class DataStreamCtx:
    """Per-connection filter state, persisted inside the filter globals.

    All state lives in the ``__firegex_pyfilter_ctx`` dict stored in *glob*,
    so any DataStreamCtx rebuilt from the same globals sees the same data.
    Every property lazily initialises its backing key on first read.
    """

    def __init__(self, glob: dict):
        # Create (or reuse) the shared context dict inside the filter globals.
        self.__data = glob.setdefault("__firegex_pyfilter_ctx", {})
        self.filter_glob = glob

    @property
    def filter_call_info(self) -> list["FilterHandler"]:
        """Ordered FilterHandler list produced at compile time."""
        return self.__data.setdefault("filter_call_info", [])

    @filter_call_info.setter
    def filter_call_info(self, v: list["FilterHandler"]):
        self.__data["filter_call_info"] = v

    @property
    def stream(self) -> list["RawPacket"]:
        """Packets buffered for the current stream."""
        return self.__data.setdefault("stream", [])

    @stream.setter
    def stream(self, v: list["RawPacket"]):
        self.__data["stream"] = v

    @property
    def stream_size(self) -> int:
        """Total payload bytes currently buffered in ``stream``."""
        return self.__data.setdefault("stream_size", 0)

    @stream_size.setter
    def stream_size(self, v: int):
        self.__data["stream_size"] = v

    @property
    def stream_max_size(self) -> int:
        # NOTE(review): 8e20 (a float) is effectively "no limit"; kept as-is
        # for compatibility with the default used elsewhere in the package.
        return self.__data.setdefault("stream_max_size", 1*8e20)

    @stream_max_size.setter
    def stream_max_size(self, v: int):
        self.__data["stream_max_size"] = v

    @property
    def full_stream_action(self) -> "FullStreamAction":
        """Action taken when the stream exceeds ``stream_max_size``.

        BUG FIX: the default used to be the string ``"flush"``, which can
        never match the ``FullStreamAction`` enum cases used by the packet
        handler; it now defaults to ``FullStreamAction.FLUSH``.
        """
        if "full_stream_action" not in self.__data:
            # Local import avoids a module-level import cycle with models.
            from firegex.nfproxy.internals.models import FullStreamAction
            self.__data["full_stream_action"] = FullStreamAction.FLUSH
        return self.__data["full_stream_action"]

    @full_stream_action.setter
    def full_stream_action(self, v: "FullStreamAction"):
        self.__data["full_stream_action"] = v

    @property
    def current_pkt(self) -> "RawPacket":
        """Packet currently being processed (None before the first packet)."""
        return self.__data.get("current_pkt", None)

    @current_pkt.setter
    def current_pkt(self, v: "RawPacket"):
        self.__data["current_pkt"] = v

    @property
    def http_data_objects(self) -> dict:
        """Scratch storage for HTTP data handlers."""
        return self.__data.setdefault("http_data_objects", {})

    @http_data_objects.setter
    def http_data_objects(self, v: dict):
        self.__data["http_data_objects"] = v

    @property
    def save_http_data_in_streams(self) -> bool:
        """Whether HTTP data objects should be retained in the stream buffer."""
        return self.__data.setdefault("save_http_data_in_streams", False)

    @save_http_data_in_streams.setter
    def save_http_data_in_streams(self, v: bool):
        self.__data["save_http_data_in_streams"] = v

    @property
    def flush_action_set(self) -> set[Callable]:
        """Callbacks invoked when the buffered stream is flushed."""
        return self.__data.setdefault("flush_action_set", set())

    @flush_action_set.setter
    def flush_action_set(self, v: set[Callable]):
        self.__data["flush_action_set"] = v
|
||||
|
||||
|
||||
3
fgex-lib/firegex/nfproxy/internals/exceptions.py
Normal file
3
fgex-lib/firegex/nfproxy/internals/exceptions.py
Normal file
@@ -0,0 +1,3 @@
|
||||
|
||||
class NotReadyToRun(Exception):
    """Raise this exception from a data handler when the stream state is not
    ready to produce this object; the filter call will be skipped."""
|
||||
40
fgex-lib/firegex/nfproxy/internals/models.py
Normal file
40
fgex-lib/firegex/nfproxy/internals/models.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
|
||||
class Action(Enum):
    """Verdict a pyfilter (or the packet handler) can return for a packet.

    The numeric value is reported back to the core via
    ``PacketHandlerResult.set_result()``.
    """
    ACCEPT = 0
    DROP = 1
    REJECT = 2
    MANGLE = 3  # packet bytes were modified; the mangled packet is reported back
|
||||
|
||||
class FullStreamAction(Enum):
    """What to do when the buffered stream would exceed its maximum size."""
    FLUSH = 0   # clear the buffered stream and continue filtering
    ACCEPT = 1  # accept the packet without running filters
    REJECT = 2  # reject the packet, tagged @MAX_STREAM_SIZE_REACHED
    DROP = 3    # drop the packet, tagged @MAX_STREAM_SIZE_REACHED
|
||||
|
||||
@dataclass
class FilterHandler:
    """Descriptor for one registered pyfilter function."""
    func: callable  # the user filter function itself
    name: str  # the function's __name__, used in match reports
    params: dict[type, callable]  # parameter annotation type -> data-extraction callable
    proto: str  # protocol the filter was registered for
|
||||
|
||||
@dataclass
class PacketHandlerResult:
    """Outcome of handling one packet, written back into the filter globals.

    ``set_result()`` serialises the verdict into the
    ``__firegex_pyfilter_result`` key where the embedding core reads it;
    ``reset_result()`` clears that slot.
    """
    glob: dict = field(repr=False)  # filter globals; excluded from repr
    action: Action = Action.ACCEPT  # final verdict for the packet
    matched_by: str | None = None  # name of the filter that decided, if any
    mangled_packet: bytes | None = None  # modified raw packet when mangling occurred

    def set_result(self) -> None:
        """Publish the verdict dict for the core to consume."""
        self.glob["__firegex_pyfilter_result"] = {
            "action": self.action.value,
            "matched_by": self.matched_by,
            "mangled_packet": self.mangled_packet
        }

    def reset_result(self) -> None:
        """Clear any previously published verdict."""
        self.glob["__firegex_pyfilter_result"] = None
|
||||
|
||||
|
||||
Reference in New Issue
Block a user