code push

This commit is contained in:
Domingo Dirutigliano
2025-03-03 20:25:36 +01:00
parent 8ae533e8f7
commit 072745cc06
22 changed files with 1020 additions and 206 deletions

View File

@@ -1,23 +1,11 @@
import functools
from firegex.nfproxy.params import RawPacket
from enum import Enum
class Action(Enum):
ACCEPT = 0
DROP = 1
REJECT = 2
MANGLE = 3
class FullStreamAction(Enum):
FLUSH = 0
ACCEPT = 1
REJECT = 2
DROP = 3
from firegex.nfproxy.models import RawPacket, TCPInputStream, TCPOutputStream, TCPClientStream, TCPServerStream, TCPStreams
from firegex.nfproxy.internals.models import Action, FullStreamAction
ACCEPT = Action.ACCEPT
DROP = Action.DROP
REJECT = Action.REJECT
MANGLE = Action.MANGLE
UNSTABLE_MANGLE = Action.MANGLE
def pyfilter(func):
"""
@@ -45,8 +33,7 @@ def clear_pyfilter_registry():
pyfilter.registry.clear()
__all__ = [
"ACCEPT", "DROP", "REJECT", "MANGLE", "EXCEPTION", "INVALID",
"Action", "FullStreamAction",
"pyfilter",
"RawPacket"
"ACCEPT", "DROP", "REJECT", "UNSTABLE_MANGLE"
"Action", "FullStreamAction", "pyfilter",
"RawPacket", "TCPInputStream", "TCPOutputStream", "TCPClientStream", "TCPServerStream", "TCPStreams"
]

View File

@@ -1,60 +1,23 @@
from inspect import signature
from firegex.nfproxy.params import RawPacket, NotReadyToRun
from firegex.nfproxy import Action, FullStreamAction
from dataclasses import dataclass, field
from firegex.nfproxy.internals.models import Action, FullStreamAction
from firegex.nfproxy.internals.models import FilterHandler, PacketHandlerResult
import functools
from firegex.nfproxy.internals.data import DataStreamCtx
from firegex.nfproxy.internals.exceptions import NotReadyToRun
from firegex.nfproxy.internals.data import RawPacket
type_annotations_associations = {
"tcp": {
RawPacket: RawPacket.fetch_from_global
},
"http": {
RawPacket: RawPacket.fetch_from_global
}
}
@dataclass
class FilterHandler:
func: callable
name: str
params: dict[type, callable]
proto: str
class internal_data:
filter_call_info: list[FilterHandler] = []
stream: list[RawPacket] = []
stream_size: int = 0
stream_max_size: int = 1*8e20
full_stream_action: str = "flush"
filter_glob: dict = {}
@dataclass
class PacketHandlerResult:
glob: dict = field(repr=False)
action: Action = Action.ACCEPT
matched_by: str = None
mangled_packet: bytes = None
def set_result(self) -> None:
self.glob["__firegex_pyfilter_result"] = {
"action": self.action.value,
"matched_by": self.matched_by,
"mangled_packet": self.mangled_packet
}
def reset_result(self) -> None:
self.glob["__firegex_pyfilter_result"] = None
def context_call(func, *args, **kargs):
internal_data.filter_glob["__firegex_tmp_args"] = args
internal_data.filter_glob["__firegex_tmp_kargs"] = kargs
internal_data.filter_glob["__firege_tmp_call"] = func
res = eval("__firege_tmp_call(*__firegex_tmp_args, **__firegex_tmp_kargs)", internal_data.filter_glob, internal_data.filter_glob)
del internal_data.filter_glob["__firegex_tmp_args"]
del internal_data.filter_glob["__firegex_tmp_kargs"]
del internal_data.filter_glob["__firege_tmp_call"]
def context_call(glob, func, *args, **kargs):
    """Invoke *func* so that it executes with *glob* as its global namespace.

    The callable and its arguments are stashed under temporary keys inside
    *glob*, and the call is performed through eval() with *glob* as both
    globals and locals, so the filter function runs with the filter module's
    global state visible.

    NOTE(review): "__firege_tmp_call" looks like a typo for
    "__firegex_tmp_call", but it is used consistently, so behavior is correct.
    NOTE(review): eval() here runs pre-registered filter callables, not
    external text — never route untrusted input through this helper.

    Returns whatever *func* returns.
    """
    glob["__firegex_tmp_args"] = args
    glob["__firegex_tmp_kargs"] = kargs
    glob["__firege_tmp_call"] = func
    try:
        # eval() inserts "__builtins__" into glob if missing; harmless here.
        return eval("__firege_tmp_call(*__firegex_tmp_args, **__firegex_tmp_kargs)", glob, glob)
    finally:
        # Always drop the temporary keys, even when the filter raises;
        # previously an exception leaked them into the filter namespace.
        del glob["__firegex_tmp_args"]
        del glob["__firegex_tmp_kargs"]
        del glob["__firege_tmp_call"]
def generate_filter_structure(filters: list[str], proto:str, glob:dict) -> list[FilterHandler]:
from firegex.nfproxy.models import type_annotations_associations
if proto not in type_annotations_associations.keys():
raise Exception("Invalid protocol")
res = []
@@ -103,22 +66,27 @@ def get_filters_info(code:str, proto:str) -> list[FilterHandler]:
def get_filter_names(code:str, proto:str) -> list[str]:
return [ele.name for ele in get_filters_info(code, proto)]
def handle_packet() -> None:
def handle_packet(glob: dict) -> None:
internal_data = DataStreamCtx(glob)
print("I'm here", flush=True)
cache_call = {} # Cache of the data handler calls
pkt_info = RawPacket.fetch_from_global(internal_data.filter_glob)
pkt_info = RawPacket._fetch_packet(internal_data)
internal_data.current_pkt = pkt_info
cache_call[RawPacket] = pkt_info
final_result = Action.ACCEPT
data_size = len(pkt_info.data)
result = PacketHandlerResult(internal_data.filter_glob)
result = PacketHandlerResult(glob)
if internal_data.stream_size+data_size > internal_data.stream_max_size:
match internal_data.full_stream_action:
case FullStreamAction.FLUSH:
internal_data.stream = []
internal_data.stream_size = 0
for func in internal_data.flush_action_set:
func()
case FullStreamAction.ACCEPT:
result.action = Action.ACCEPT
return result.set_result()
@@ -138,17 +106,19 @@ def handle_packet() -> None:
mangled_packet = None
for filter in internal_data.filter_call_info:
final_params = []
skip_call = False
for data_type, data_func in filter.params.items():
if data_type not in cache_call.keys():
try:
cache_call[data_type] = data_func(internal_data.filter_glob)
cache_call[data_type] = data_func(internal_data)
except NotReadyToRun:
cache_call[data_type] = None
if cache_call[data_type] is None:
continue # Parsing raised NotReadyToRun, skip filter
skip_call = True
break
final_params.append(cache_call[data_type])
res = context_call(filter.func, *final_params)
if skip_call:
continue
res = context_call(glob, filter.func, *final_params)
if res is None:
continue #ACCEPTED
@@ -168,8 +138,10 @@ def handle_packet() -> None:
return result.set_result()
def compile(glob:dict) -> None:
internal_data.filter_glob = glob
def compile(glob:dict) -> None:
internal_data = DataStreamCtx(glob)
glob["print"] = functools.partial(print, flush = True)
filters = glob["__firegex_pyfilter_enabled"]
proto = glob["__firegex_proto"]
@@ -187,3 +159,4 @@ def compile(glob:dict) -> None:
internal_data.full_stream_action = FullStreamAction.FLUSH
PacketHandlerResult(glob).reset_result()

View File

@@ -0,0 +1,190 @@
from firegex.nfproxy.internals.models import FilterHandler
from typing import Callable
class RawPacket:
    """Python-side representation of the nfqueue packet sent into this
    context by the C++ core.

    Wraps the packet payload plus direction/protocol metadata; mutating
    ``l4_data`` or ``raw_packet`` keeps ``l4_size`` consistent so the packet
    can be mangled before being re-emitted.
    """
    def __init__(self,
            data: bytes,
            raw_packet: bytes,
            is_input: bool,
            is_ipv6: bool,
            is_tcp: bool,
            l4_size: int,
        ):
        # Defensive copies/coercions of the values produced by the C++ core.
        self.__data = bytes(data)
        self.__raw_packet = bytes(raw_packet)
        self.__is_input = bool(is_input)
        self.__is_ipv6 = bool(is_ipv6)
        self.__is_tcp = bool(is_tcp)
        self.__l4_size = int(l4_size)
        # Everything before the last l4_size bytes is treated as the header.
        self.__raw_packet_header_size = len(self.__raw_packet)-self.__l4_size

    @property
    def is_input(self) -> bool:
        # True for client -> server packets (see the TCP stream models).
        return self.__is_input

    @property
    def is_ipv6(self) -> bool:
        return self.__is_ipv6

    @property
    def is_tcp(self) -> bool:
        return self.__is_tcp

    @property
    def data(self) -> bytes:
        # Application payload as delivered by the core.
        return self.__data

    @property
    def l4_size(self) -> int:
        # Size in bytes of the L4 payload carried inside raw_packet.
        return self.__l4_size

    @property
    def raw_packet_header_len(self) -> int:
        return self.__raw_packet_header_size

    @property
    def l4_data(self) -> bytes:
        # The L4 payload slice of the raw packet.
        return self.__raw_packet[self.raw_packet_header_len:]

    @l4_data.setter
    def l4_data(self, v:bytes):
        if not isinstance(v, bytes):
            raise Exception("Invalid data type, data MUST be of type bytes")
        #if len(v) != self.__l4_size:
        #    raise Exception("Invalid data size, must be equal to the original packet header size (due to a technical limitation)")
        # Replace the payload while keeping the original header bytes.
        self.__raw_packet = self.__raw_packet[:self.raw_packet_header_len]+v
        self.__l4_size = len(v)

    @property
    def raw_packet(self) -> bytes:
        # The full packet bytes (header + L4 payload).
        return self.__raw_packet

    @raw_packet.setter
    def raw_packet(self, v:bytes):
        if not isinstance(v, bytes):
            raise Exception("Invalid data type, data MUST be of type bytes")
        #if len(v) != len(self.__raw_packet):
        #    raise Exception("Invalid data size, must be equal to the original packet size (due to a technical limitation)")
        if len(v) < self.raw_packet_header_len:
            raise Exception("Invalid data size, must be greater than the original packet header size")
        self.__raw_packet = v
        self.__l4_size = len(v)-self.raw_packet_header_len

    @classmethod
    def _fetch_packet(cls, internal_data):
        """Build a RawPacket from a DataStreamCtx (or a raw globals dict)."""
        from firegex.nfproxy.internals.data import DataStreamCtx
        if not isinstance(internal_data, DataStreamCtx):
            if isinstance(internal_data, dict):
                # Accept the filter globals directly for convenience.
                internal_data = DataStreamCtx(internal_data)
            else:
                raise Exception("Invalid data type, data MUST be of type DataStream, or glob dict")
        if "__firegex_packet_info" not in internal_data.filter_glob.keys():
            raise Exception("Packet info not found")
        # The core publishes the packet fields as constructor kwargs.
        return cls(**internal_data.filter_glob["__firegex_packet_info"])

    def __repr__(self):
        return f"RawPacket(data={self.data}, raw_packet={self.raw_packet}, is_input={self.is_input}, is_ipv6={self.is_ipv6}, is_tcp={self.is_tcp}, l4_size={self.l4_size})"
class DataStreamCtx:
    """Accessor for the per-connection filter state.

    All values live in glob["__firegex_pyfilter_ctx"], so the state survives
    across repeated calls that share the same globals dict; instances of this
    class are only thin, lazily-defaulting views over that dict.
    """

    def __init__(self, glob: dict):
        # setdefault creates the backing dict on first use and reuses it after
        # (replaces the previous explicit `not in glob.keys()` membership test).
        self.__data = glob.setdefault("__firegex_pyfilter_ctx", {})
        self.filter_glob = glob

    def _lazy(self, key, factory):
        # Return the stored value, initializing it from factory() on first
        # access. Factored out of the previously copy-pasted property bodies.
        if key not in self.__data:
            self.__data[key] = factory()
        return self.__data[key]

    @property
    def filter_call_info(self) -> "list[FilterHandler]":
        # Filters (with their resolved parameter fetchers) to run per packet.
        return self._lazy("filter_call_info", list)

    @filter_call_info.setter
    def filter_call_info(self, v: "list[FilterHandler]"):
        self.__data["filter_call_info"] = v

    @property
    def stream(self) -> "list[RawPacket]":
        # Packets accumulated for the current TCP stream.
        return self._lazy("stream", list)

    @stream.setter
    def stream(self, v: "list[RawPacket]"):
        self.__data["stream"] = v

    @property
    def stream_size(self) -> int:
        # Total payload bytes currently buffered in `stream`.
        return self._lazy("stream_size", int)

    @stream_size.setter
    def stream_size(self, v: int):
        self.__data["stream_size"] = v

    @property
    def stream_max_size(self) -> int:
        # NOTE(review): 1*8e20 is a float and the unit is unclear — confirm
        # against the core before relying on it. Value kept unchanged.
        return self._lazy("stream_max_size", lambda: 1*8e20)

    @stream_max_size.setter
    def stream_max_size(self, v: int):
        self.__data["stream_max_size"] = v

    @property
    def full_stream_action(self) -> str:
        # NOTE(review): defaults to the *string* "flush", while compile()
        # stores FullStreamAction enum members here — confirm both are handled.
        return self._lazy("full_stream_action", lambda: "flush")

    @full_stream_action.setter
    def full_stream_action(self, v: str):
        self.__data["full_stream_action"] = v

    @property
    def current_pkt(self) -> "RawPacket":
        # The packet being processed right now (None before the first one).
        return self.__data.get("current_pkt", None)

    @current_pkt.setter
    def current_pkt(self, v: "RawPacket"):
        self.__data["current_pkt"] = v

    @property
    def http_data_objects(self) -> dict:
        # Per-model HTTP parser instances, keyed by model class.
        return self._lazy("http_data_objects", dict)

    @http_data_objects.setter
    def http_data_objects(self, v: dict):
        self.__data["http_data_objects"] = v

    @property
    def save_http_data_in_streams(self) -> bool:
        # True after a websocket upgrade: raw data is kept instead of parsed.
        return self._lazy("save_http_data_in_streams", lambda: False)

    @save_http_data_in_streams.setter
    def save_http_data_in_streams(self, v: bool):
        self.__data["save_http_data_in_streams"] = v

    @property
    def flush_action_set(self) -> "set[Callable]":
        # Callbacks invoked when the buffered stream gets flushed.
        return self._lazy("flush_action_set", set)

    @flush_action_set.setter
    def flush_action_set(self, v: "set[Callable]"):
        self.__data["flush_action_set"] = v

View File

@@ -0,0 +1,3 @@
class NotReadyToRun(Exception):
    """Signals that a data model cannot be built from the current stream state.

    Raising it from a parameter fetcher makes the engine skip the filter call
    for this packet instead of treating it as a failure.
    """

View File

@@ -0,0 +1,40 @@
from dataclasses import dataclass, field
from enum import Enum
class Action(Enum):
    """Verdict a pyfilter returns for a packet; the numeric value is what
    PacketHandlerResult publishes back to the proxy core."""
    ACCEPT = 0  # let the packet through
    DROP = 1    # presumably discards the packet — core-side handling not shown here
    REJECT = 2  # presumably rejects the connection — core-side handling not shown here
    MANGLE = 3  # deliver mangled_packet in place of the original payload
class FullStreamAction(Enum):
    """Policy applied when the buffered stream would exceed stream_max_size."""
    FLUSH = 0   # discard the buffered stream data and keep filtering
    ACCEPT = 1  # accept the packet without running the filters
    REJECT = 2  # presumably rejects the connection — handling not visible here
    DROP = 3    # presumably drops the packet — handling not visible here
@dataclass
class FilterHandler:
    """A registered pyfilter plus the metadata needed to invoke it."""
    func: callable                  # the user-defined filter function
    name: str                       # filter name, as exposed to the engine
    params: dict[type, callable]    # data-model type -> fetcher that builds it
    proto: str                      # protocol the filter targets ("tcp"/"http")
@dataclass
class PacketHandlerResult:
    """Outcome of running the filters on one packet.

    set_result()/reset_result() publish the verdict into the filter globals
    under "__firegex_pyfilter_result", where the proxy core reads it.
    """
    glob: dict = field(repr=False)          # filter globals to publish into
    action: Action = Action.ACCEPT          # final verdict for the packet
    matched_by: str | None = None           # name of the filter that matched, if any
    mangled_packet: bytes | None = None     # replacement bytes when action is MANGLE

    def set_result(self) -> None:
        """Write the verdict into the globals for the core to pick up."""
        self.glob["__firegex_pyfilter_result"] = {
            "action": self.action.value,
            "matched_by": self.matched_by,
            "mangled_packet": self.mangled_packet
        }

    def reset_result(self) -> None:
        """Clear any previously published verdict."""
        self.glob["__firegex_pyfilter_result"] = None

View File

@@ -0,0 +1,28 @@
from firegex.nfproxy.models.tcp import TCPInputStream, TCPOutputStream, TCPClientStream, TCPServerStream, TCPStreams
from firegex.nfproxy.models.http import HttpRequest, HttpResponse, HttpRequestHeader, HttpResponseHeader
from firegex.nfproxy.internals.data import RawPacket
type_annotations_associations = {
"tcp": {
RawPacket: RawPacket._fetch_packet,
TCPInputStream: TCPInputStream._fetch_packet,
TCPOutputStream: TCPOutputStream._fetch_packet,
TCPStreams: TCPStreams._fetch_packet,
},
"http": {
RawPacket: RawPacket._fetch_packet,
TCPInputStream: TCPInputStream._fetch_packet,
TCPOutputStream: TCPOutputStream._fetch_packet,
TCPStreams: TCPStreams._fetch_packet,
HttpRequest: HttpRequest._fetch_packet,
HttpResponse: HttpResponse._fetch_packet,
HttpRequestHeader: HttpRequestHeader._fetch_packet,
HttpResponseHeader: HttpResponseHeader._fetch_packet,
}
}
__all__ = [
"RawPacket",
"TCPInputStream", "TCPOutputStream", "TCPClientStream", "TCPServerStream", "TCPStreams",
"HttpRequest", "HttpResponse", "HttpRequestHeader", "HttpResponseHeader",
]

View File

@@ -0,0 +1,385 @@
import pyllhttp
from firegex.nfproxy.internals.exceptions import NotReadyToRun
from firegex.nfproxy.internals.data import DataStreamCtx
class InternalCallbackHandler():
url: str|None = None
_url_buffer: bytes = b""
headers: dict[str, str] = {}
_header_fields: dict[bytes, bytes] = {}
has_begun: bool = False
body: bytes = None
_body_buffer: bytes = b""
headers_complete: bool = False
message_complete: bool = False
status: str|None = None
_status_buffer: bytes = b""
current_header_field = None
current_header_value = None
_save_body = True
def on_message_begin(self):
self.has_begun = True
def on_url(self, url):
self._url_buffer += url
def on_url_complete(self):
self.url = self._url_buffer.decode(errors="ignore")
self._url_buffer = None
def on_header_field(self, field):
if self.current_header_field is None:
self.current_header_field = bytearray(field)
else:
self.current_header_field += field
def on_header_field_complete(self):
self.current_header_field = self.current_header_field
def on_header_value(self, value):
if self.current_header_value is None:
self.current_header_value = bytearray(value)
else:
self.current_header_value += value
def on_header_value_complete(self):
if self.current_header_value is not None and self.current_header_field is not None:
self._header_fields[self.current_header_field.decode(errors="ignore")] = self.current_header_value.decode(errors="ignore")
self.current_header_field = None
self.current_header_value = None
def on_headers_complete(self):
self.headers_complete = True
self.headers = self._header_fields
self._header_fields = {}
self.current_header_field = None
self.current_header_value = None
def on_body(self, body: bytes):
if self._save_body:
self._body_buffer += body
def on_message_complete(self):
self.body = self._body_buffer
self._body_buffer = b""
self.message_complete = True
def on_status(self, status: bytes):
self._status_buffer += status
def on_status_complete(self):
self.status = self._status_buffer.decode(errors="ignore")
self._status_buffer = b""
@property
def keep_alive(self) -> bool:
return self.should_keep_alive
@property
def should_upgrade(self) -> bool:
return self.is_upgrading
@property
def http_version(self) -> str:
return f"{self.major}.{self.minor}"
@property
def method_parsed(self) -> str:
return self.method.decode(errors="ignore")
@property
def content_length_parsed(self) -> int:
return self.content_length
class InternalHttpRequest(InternalCallbackHandler, pyllhttp.Request):
    """llhttp request parser with the shared callback handler mixed in."""
    def __init__(self):
        # Explicit MRO hops: first run the initializer that follows
        # pyllhttp.Request in the MRO, then the one that follows
        # InternalCallbackHandler (i.e. pyllhttp.Request.__init__).
        super(pyllhttp.Request, self).__init__()
        super(InternalCallbackHandler, self).__init__()
class InternalHttpResponse(InternalCallbackHandler, pyllhttp.Response):
    """llhttp response parser with the shared callback handler mixed in."""
    def __init__(self):
        # Explicit MRO hops, mirroring InternalHttpRequest.__init__.
        super(pyllhttp.Response, self).__init__()
        super(InternalCallbackHandler, self).__init__()
class InternalBasicHttpMetaClass:
    """Base for the HTTP data models.

    NOTE(review): despite the name this is a plain base class, not a
    metaclass. It wraps an Internal* parser, forwards its parsed fields as
    read-only properties, and implements the fetch/trigger protocol that the
    filter engine drives through _fetch_packet() and its hook methods.
    """
    def __init__(self):
        # Concrete subclasses replace _parser with a request/response parser.
        self._parser: InternalHttpRequest|InternalHttpResponse
        self._headers_were_set = False
        self.stream = b""           # raw bytes kept after a websocket upgrade
        self.raised_error = False   # a parse error poisons this handler

    # Read-only views over the underlying parser state.
    @property
    def url(self) -> str|None:
        return self._parser.url

    @property
    def headers(self) -> dict[str, str]:
        return self._parser.headers

    @property
    def has_begun(self) -> bool:
        return self._parser.has_begun

    @property
    def body(self) -> bytes:
        return self._parser.body

    @property
    def headers_complete(self) -> bool:
        return self._parser.headers_complete

    @property
    def message_complete(self) -> bool:
        return self._parser.message_complete

    @property
    def http_version(self) -> str:
        return self._parser.http_version

    @property
    def keep_alive(self) -> bool:
        return self._parser.keep_alive

    @property
    def should_upgrade(self) -> bool:
        return self._parser.should_upgrade

    @property
    def content_length(self) -> int|None:
        return self._parser.content_length_parsed

    @property
    def method(self) -> str|None:
        return self._parser.method_parsed

    def _fetch_current_packet(self, internal_data: DataStreamCtx):
        # TODO: if an error is triggered should I reject the connection?
        if internal_data.save_http_data_in_streams: # This is a websocket upgrade!
            # Post-upgrade: no HTTP parsing, just collect the raw bytes.
            self.stream += internal_data.current_pkt.data
        else:
            try:
                self._parser.execute(internal_data.current_pkt.data)
                # Force message completion once the declared body length has
                # fully arrived.
                if self._parser.headers_complete and len(self._parser._body_buffer) == self._parser.content_length_parsed:
                    self._parser.on_message_complete()
            except Exception as e:
                # Remember the failure: _fetch_packet will build a fresh
                # handler next time instead of reusing this one.
                self.raised_error = True
                raise e

    #It's called the first time if the headers are complete, and second time with body complete
    def _callable_checks(self, internal_data: DataStreamCtx):
        # Trigger once when the headers first complete...
        if self._parser.headers_complete and not self._headers_were_set:
            self._headers_were_set = True
            return True
        # ...and again when the message completes (always, after an upgrade).
        return self._parser.message_complete or internal_data.save_http_data_in_streams

    def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
        # Hook: subclasses veto parsing based on the packet direction.
        return True

    def _trigger_remove_data(self, internal_data: DataStreamCtx):
        # Hook: drop this handler once its message is fully parsed.
        return self.message_complete

    @classmethod
    def _fetch_packet(cls, internal_data: DataStreamCtx):
        """Feed the current packet into this model's handler and return it,
        or raise NotReadyToRun when the filter should be skipped."""
        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False:
            raise NotReadyToRun()
        datahandler:InternalBasicHttpMetaClass = internal_data.http_data_objects.get(cls, None)
        if datahandler is None or datahandler.raised_error:
            # First packet for this model, or the previous handler errored.
            datahandler = cls()
            internal_data.http_data_objects[cls] = datahandler
        if not datahandler._before_fetch_callable_checks(internal_data):
            raise NotReadyToRun()
        datahandler._fetch_current_packet(internal_data)
        if not datahandler._callable_checks(internal_data):
            raise NotReadyToRun()
        if datahandler.should_upgrade:
            # Switch to raw-stream mode (e.g. websocket) from now on.
            internal_data.save_http_data_in_streams = True
        if datahandler._trigger_remove_data(internal_data):
            if internal_data.http_data_objects.get(cls):
                del internal_data.http_data_objects[cls]
        return datahandler
class HttpRequest(InternalBasicHttpMetaClass):
    """Data model exposing the HTTP request currently being parsed."""
    def __init__(self):
        super().__init__()
        # These are consumed by the base class fetch/trigger machinery.
        self._parser: InternalHttpRequest = InternalHttpRequest()
        self._headers_were_set = False

    @property
    def method(self) -> bytes:
        # Overrides the base property: returns the parser's raw method value
        # instead of the decoded string (method_parsed) the base class uses.
        # NOTE(review): confirm whether the decoded str was intended here.
        return self._parser.method

    def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
        # Requests are parsed only from client -> server (input) packets.
        return internal_data.current_pkt.is_input

    def __repr__(self):
        return f"<HttpRequest method={self.method} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} has_begun={self.has_begun} content_length={self.content_length} stream={self.stream}>"
class HttpResponse(InternalBasicHttpMetaClass):
    """Data model exposing the HTTP response currently being parsed."""
    def __init__(self):
        super().__init__()
        # These are consumed by the base class fetch/trigger machinery.
        self._parser: InternalHttpResponse = InternalHttpResponse()
        self._headers_were_set = False

    @property
    def status_code(self) -> str|None:
        # NOTE(review): despite the name, the parser stores the *decoded
        # status text* here (see on_status_complete), not a numeric code —
        # annotation adjusted to match; confirm whether an int was intended.
        return self._parser.status

    def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
        # Responses are parsed only from server -> client (output) packets.
        return not internal_data.current_pkt.is_input

    def __repr__(self):
        return f"<HttpResponse status_code={self.status_code} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} has_begun={self.has_begun} content_length={self.content_length} stream={self.stream}>"
class HttpRequestHeader(HttpRequest):
    """HttpRequest variant that triggers once, as soon as the request
    headers are fully parsed; body bytes are discarded and never
    re-trigger the filter."""
    def __init__(self):
        super().__init__()
        # Header-only model: skip body buffering entirely.
        self._parser._save_body = False

    def _callable_checks(self, internal_data: DataStreamCtx):
        """Fire exactly once, the first time the header set completes."""
        if not self._parser.headers_complete or self._headers_were_set:
            return False
        self._headers_were_set = True
        return True
class HttpResponseHeader(HttpResponse):
    """HttpResponse variant that triggers once, as soon as the response
    headers are fully parsed; body bytes are discarded and never
    re-trigger the filter."""
    def __init__(self):
        super().__init__()
        # Header-only model: skip body buffering entirely.
        self._parser._save_body = False

    def _callable_checks(self, internal_data: DataStreamCtx):
        """Fire exactly once, the first time the header set completes."""
        if not self._parser.headers_complete or self._headers_were_set:
            return False
        self._headers_were_set = True
        return True
"""
#TODO include this?
import codecs
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = "\x00".encode("ascii") # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3
def guess_json_utf(data):
""
:rtype: str
""
# JSON always starts with two ASCII characters, so detection is as
# easy as counting the nulls and from their location and count
# determine the encoding. Also detect a BOM, if present.
sample = data[:4]
if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
return "utf-32" # BOM included
if sample[:3] == codecs.BOM_UTF8:
return "utf-8-sig" # BOM included, MS style (discouraged)
if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
return "utf-16" # BOM included
nullcount = sample.count(_null)
if nullcount == 0:
return "utf-8"
if nullcount == 2:
if sample[::2] == _null2: # 1st and 3rd are null
return "utf-16-be"
if sample[1::2] == _null2: # 2nd and 4th are null
return "utf-16-le"
# Did not detect 2 valid UTF-16 ascii-range characters
if nullcount == 3:
if sample[:3] == _null3:
return "utf-32-be"
if sample[1:] == _null3:
return "utf-32-le"
# Did not detect a valid UTF-32 ascii-range character
return None
from http_parser.pyparser import HttpParser
import json
from urllib.parse import parse_qsl
from dataclasses import dataclass
@dataclass
class HttpMessage():
fragment: str
headers: dict
method: str
parameters: dict
path: str
query_string: str
raw_body: bytes
status_code: int
url: str
version: str
class HttpMessageParser(HttpParser):
def __init__(self, data:bytes, decompress_body=True):
super().__init__(decompress = decompress_body)
self.execute(data, len(data))
self._parameters = {}
try:
self._parse_parameters()
except Exception as e:
print("Error in parameters parsing:", data)
print("Exception:", str(e))
def get_raw_body(self):
return b"\r\n".join(self._body)
def _parse_query_string(self, raw_string):
parameters = parse_qsl(raw_string)
for key,value in parameters:
try:
key = key.decode()
value = value.decode()
except:
pass
if self._parameters.get(key):
if isinstance(self._parameters[key], list):
self._parameters[key].append(value)
else:
self._parameters[key] = [self._parameters[key], value]
else:
self._parameters[key] = value
def _parse_parameters(self):
if self._method == "POST":
body = self.get_raw_body()
if len(body) == 0:
return
content_type = self.get_headers().get("Content-Type")
if not content_type or "x-www-form-urlencoded" in content_type:
try:
self._parse_query_string(body.decode())
except:
pass
elif "json" in content_type:
self._parameters = json.loads(body)
elif self._method == "GET":
self._parse_query_string(self._query_string)
def get_parameters(self):
""returns parameters parsed from query string or body""
return self._parameters
def get_version(self):
if self._version:
return ".".join([str(x) for x in self._version])
return None
def to_message(self):
return HttpMessage(self._fragment, self._headers, self._method,
self._parameters, self._path, self._query_string,
self.get_raw_body(), self._status_code,
self._url, self.get_version()
)
"""

View File

@@ -0,0 +1,113 @@
from firegex.nfproxy.internals.data import DataStreamCtx
from firegex.nfproxy.internals.exceptions import NotReadyToRun
class TCPStreams:
    """Both directions of the TCP conversation, reassembled.

    A filter using this data model is invoked only when:
    - the packet is TCP
    - at least one packet has been seen
    """
    def __init__(self,
            input_data: bytes,
            output_data: bytes,
            is_ipv6: bool,
        ):
        self.__input_data = bytes(input_data)
        self.__output_data = bytes(output_data)
        self.__is_ipv6 = bool(is_ipv6)

    @property
    def input_data(self) -> bytes:
        """Client-to-server bytes accumulated so far."""
        return self.__input_data

    @property
    def output_data(self) -> bytes:
        """Server-to-client bytes accumulated so far."""
        return self.__output_data

    @property
    def is_ipv6(self) -> bool:
        return self.__is_ipv6

    @classmethod
    def _fetch_packet(cls, internal_data:DataStreamCtx):
        """Assemble both stream directions, or raise NotReadyToRun."""
        pkt = internal_data.current_pkt
        if pkt is None or pkt.is_tcp is False:
            raise NotReadyToRun()
        inbound = []
        outbound = []
        for chunk in internal_data.stream:
            (inbound if chunk.is_input else outbound).append(chunk.data)
        return cls(
            input_data=b"".join(inbound),
            output_data=b"".join(outbound),
            is_ipv6=pkt.is_ipv6,
        )
class TCPInputStream:
    """TCP input stream assembled from the client-sent data.

    A filter using this data model is invoked only when:
    - the packet is TCP
    - at least one packet has been seen
    - the current packet is a client (input) packet
    """
    def __init__(self,
            data: bytes,
            is_ipv6: bool,
        ):
        self.__data = bytes(data)
        self.__is_ipv6 = bool(is_ipv6)

    @property
    def data(self) -> bytes:
        # Client-to-server data (annotation fixed: previously declared bool).
        return self.__data

    @property
    def is_ipv6(self) -> bool:
        return self.__is_ipv6

    @classmethod
    def _fetch_packet(cls, internal_data:DataStreamCtx):
        """Build the model for the current packet, or raise NotReadyToRun."""
        # Only TCP packets travelling client -> server qualify.
        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False or internal_data.current_pkt.is_input is False:
            raise NotReadyToRun()
        # NOTE(review): RawPacket does not define get_related_raw_stream()
        # anywhere in this commit — confirm where this method is provided.
        return cls(
            data=internal_data.current_pkt.get_related_raw_stream(),
            is_ipv6=internal_data.current_pkt.is_ipv6,
        )

# Alias: the client-side stream and the input stream are the same model.
TCPClientStream = TCPInputStream
class TCPOutputStream:
    """TCP output stream assembled from the server-sent data.

    A filter using this data model is invoked only when:
    - the packet is TCP
    - at least one packet has been seen
    - the current packet is a server (output) packet
    """
    def __init__(self,
            data: bytes,
            is_ipv6: bool,
        ):
        self.__data = bytes(data)
        self.__is_ipv6 = bool(is_ipv6)

    @property
    def data(self) -> bytes:
        # Server-to-client data (annotation fixed: previously declared bool).
        return self.__data

    @property
    def is_ipv6(self) -> bool:
        return self.__is_ipv6

    @classmethod
    def _fetch_packet(cls, internal_data:DataStreamCtx):
        """Build the model for the current packet, or raise NotReadyToRun."""
        # Only TCP packets travelling server -> client qualify.
        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False or internal_data.current_pkt.is_input is True:
            raise NotReadyToRun()
        # NOTE(review): RawPacket does not define get_related_raw_stream()
        # anywhere in this commit — confirm where this method is provided.
        return cls(
            data=internal_data.current_pkt.get_related_raw_stream(),
            is_ipv6=internal_data.current_pkt.is_ipv6,
        )

# Alias: the server-side stream and the output stream are the same model.
TCPServerStream = TCPOutputStream

View File

@@ -1,79 +0,0 @@
class NotReadyToRun(Exception): # raise this exception if the stream state is not ready to parse this object, the call will be skipped
    """Raised by a data-model fetcher to skip the filter call for this packet."""
    pass
class RawPacket:
def __init__(self,
data: bytes,
raw_packet: bytes,
is_input: bool,
is_ipv6: bool,
is_tcp: bool,
l4_size: int,
):
self.__data = bytes(data)
self.__raw_packet = bytes(raw_packet)
self.__is_input = bool(is_input)
self.__is_ipv6 = bool(is_ipv6)
self.__is_tcp = bool(is_tcp)
self.__l4_size = int(l4_size)
self.__raw_packet_header_size = len(self.__raw_packet)-self.__l4_size
@property
def is_input(self) -> bool:
return self.__is_input
@property
def is_ipv6(self) -> bool:
return self.__is_ipv6
@property
def is_tcp(self) -> bool:
return self.__is_tcp
@property
def data(self) -> bytes:
return self.__data
@property
def l4_size(self) -> int:
return self.__l4_size
@property
def raw_packet_header_len(self) -> int:
return self.__raw_packet_header_size
@property
def l4_data(self) -> bytes:
return self.__raw_packet[self.raw_packet_header_len:]
@l4_data.setter
def l4_data(self, v:bytes):
if not isinstance(v, bytes):
raise Exception("Invalid data type, data MUST be of type bytes")
#if len(v) != self.__l4_size:
# raise Exception("Invalid data size, must be equal to the original packet header size (due to a technical limitation)")
self.__raw_packet = self.__raw_packet[:self.raw_packet_header_len]+v
self.__l4_size = len(v)
@property
def raw_packet(self) -> bytes:
return self.__raw_packet
@raw_packet.setter
def raw_packet(self, v:bytes):
if not isinstance(v, bytes):
raise Exception("Invalid data type, data MUST be of type bytes")
#if len(v) != len(self.__raw_packet):
# raise Exception("Invalid data size, must be equal to the original packet size (due to a technical limitation)")
if len(v) < self.raw_packet_header_len:
raise Exception("Invalid data size, must be greater than the original packet header size")
self.__raw_packet = v
self.__l4_size = len(v)-self.raw_packet_header_len
@classmethod
def fetch_from_global(cls, glob):
if "__firegex_packet_info" not in glob.keys():
raise Exception("Packet info not found")
return cls(**glob["__firegex_packet_info"])