code push
@@ -364,9 +364,9 @@ class PktRequest {
 #endif
     if (tcp && ack_seq_offset && packet.size() != _original_size){
         if (is_input){
-            ack_seq_offset->in += packet.size() - _original_size;
+            ack_seq_offset->in += data_size() - _data_original_size;
         }else{
-            ack_seq_offset->out += packet.size() - _original_size;
+            ack_seq_offset->out += data_size() - _data_original_size;
         }
     }
     nfq_nlmsg_verdict_put(nlh_verdict, ntohl(packet_id), NF_ACCEPT );
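The hunk above switches the TCP ack/seq offset bookkeeping from the whole-packet size delta to the L4 payload delta, so header bytes no longer pollute the offsets when a mangled payload changes length. A minimal Python sketch of the intended accounting (the class and method names here are hypothetical; only the delta arithmetic mirrors the hunk):

```python
# Hypothetical sketch of the ack/seq offset bookkeeping the hunk fixes:
# offsets must track only the L4 payload delta, not the whole-packet delta,
# since the IP/TCP header sizes are not changed by mangling.
class AckSeqOffsets:
    def __init__(self) -> None:
        self.in_off = 0   # cumulative payload delta, client -> server
        self.out_off = 0  # cumulative payload delta, server -> client

    def on_mangled(self, is_input: bool, original_payload: int, new_payload: int) -> None:
        delta = new_payload - original_payload
        if is_input:
            self.in_off += delta
        else:
            self.out_off += delta

offsets = AckSeqOffsets()
offsets.on_mangled(is_input=True, original_payload=100, new_payload=120)
assert offsets.in_off == 20
```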
@@ -72,7 +72,7 @@ PyFilterResponse {
 
 Every time a packet is received, the packet handler will execute the following code:
 
 ```python
-firegex.nfproxy.internals.handle_packet()
+firegex.nfproxy.internals.handle_packet(globals())
 ```
 
 The TCP stream is sorted by libtins using C++ code, but the C++ code is not responsible for buffering the stream, only for sorting it
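With `globals()` now passed in, the Python runtime can keep all per-stream state inside the filter's own globals instead of a process-wide singleton. For context, a user filter under the new API surface looks roughly like this (the imports match this commit; running it requires the firegex nfproxy runtime, so treat it as illustrative):

```python
# A minimal user filter under the new API surface (names from this commit).
from firegex.nfproxy import pyfilter, ACCEPT, REJECT
from firegex.nfproxy.models import RawPacket

@pyfilter
def block_flag_exfil(pkt: RawPacket):
    # The runtime builds RawPacket via RawPacket._fetch_packet(internal_data)
    # and injects it here because of the type annotation.
    if b"flag{" in pkt.data:
        return REJECT
    return ACCEPT
```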
@@ -34,7 +34,7 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
 public:
     stream_ctx sctx;
     StreamFollower follower;
-    PyThreadState * gtstate = nullptr;
+    PyThreadState * tstate = nullptr;
 
     PyInterpreterConfig py_thread_config = {
         .use_main_obmalloc = 0,
@@ -45,15 +45,16 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
         .check_multi_interp_extensions = 1,
         .gil = PyInterpreterConfig_OWN_GIL,
     };
-    PyThreadState *tstate = NULL;
     NfQueue::PktRequest<PyProxyQueue>* pkt;
     NfQueue::tcp_ack_seq_ctx* current_tcp_ack = nullptr;
 
+    PyObject* handle_packet_code = nullptr;
 
     void before_loop() override {
         PyStatus pystatus;
         // Create a new interpreter for the thread
-        gtstate = PyThreadState_New(PyInterpreterState_Main());
-        PyEval_AcquireThread(gtstate);
+        tstate = PyThreadState_New(PyInterpreterState_Main());
+        PyEval_AcquireThread(tstate);
         pystatus = Py_NewInterpreterFromConfig(&tstate, &py_thread_config);
         if(tstate == nullptr){
             cerr << "[fatal] [main] Failed to create new interpreter" << endl;
@@ -64,6 +65,12 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
             Py_ExitStatusException(pystatus);
             throw invalid_argument("Failed to create new interpreter (pystatus exc)");
         }
+
+        if(!PyGC_IsEnabled()){
+            PyGC_Enable();
+        }
+
+        handle_packet_code = unmarshal_code(py_handle_packet_code);
         // Setting callbacks for the stream follower
         follower.new_stream_callback(bind(on_new_stream, placeholders::_1, this));
         follower.stream_termination_callback(bind(on_stream_close, placeholders::_1, this));
@@ -100,11 +107,24 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
         if (compiled_code == nullptr){
             stream.client_data_callback(nullptr);
             stream.server_data_callback(nullptr);
+            stream.ignore_client_data();
+            stream.ignore_server_data();
             return pkt->accept();
+        }else{
+            try{
+                stream_match = new pyfilter_ctx(compiled_code, handle_packet_code);
+            }catch(invalid_argument& e){
+                cerr << "[error] [filter_action] Failed to create the filter context" << endl;
+                print_exception_reason();
+                sctx.clean_stream_by_id(pkt->sid);
+                stream.client_data_callback(nullptr);
+                stream.server_data_callback(nullptr);
+                stream.ignore_client_data();
+                stream.ignore_server_data();
+                return pkt->accept();
+            }
+            sctx.streams_ctx.insert_or_assign(pkt->sid, stream_match);
         }
-        stream_match = new pyfilter_ctx(compiled_code);
-        Py_DECREF(compiled_code);
-        sctx.streams_ctx.insert_or_assign(pkt->sid, stream_match);
     }else{
         stream_match = stream_search->second;
     }
@@ -140,6 +160,8 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
             print_exception_reason();
             sctx.clean_stream_by_id(pkt->sid);
             //Free the packet data
+            stream.ignore_client_data();
+            stream.ignore_server_data();
             stream.client_data_callback(nullptr);
             stream.server_data_callback(nullptr);
             return pkt->accept();
@@ -233,6 +255,7 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
         PyEval_ReleaseThread(tstate);
         PyThreadState_Clear(tstate);
         PyThreadState_Delete(tstate);
+        Py_DECREF(handle_packet_code);
 
         sctx.clean();
     }
@@ -16,9 +16,13 @@ namespace PyProxy {
 class PyCodeConfig;
 
 shared_ptr<PyCodeConfig> config;
-PyObject* py_handle_packet_code = nullptr;
 UnixClientConnection control_socket;
 
+PyObject* unmarshal_code(string encoded_code){
+    if (encoded_code.empty()) return nullptr;
+    return PyMarshal_ReadObjectFromString(encoded_code.c_str(), encoded_code.size());
+}
+
 class PyCodeConfig{
 public:
     string encoded_code;
@@ -32,22 +36,24 @@ class PyCodeConfig{
         PyObject* glob = PyDict_New();
         PyObject* result = PyEval_EvalCode(compiled_code, glob, glob);
         Py_DECREF(glob);
-        if (!result){
+        if (PyErr_Occurred()){
             PyErr_Print();
             Py_DECREF(compiled_code);
             std::cerr << "[fatal] [main] Failed to execute the code" << endl;
             throw invalid_argument("Failed to execute the code, maybe an invalid filter code has been provided");
         }
-        Py_DECREF(result);
+        Py_XDECREF(result);
         PyObject* code_dump = PyMarshal_WriteObjectToString(compiled_code, 4);
         Py_DECREF(compiled_code);
         if (code_dump == nullptr){
-            PyErr_Print();
+            if (PyErr_Occurred())
+                PyErr_Print();
             std::cerr << "[fatal] [main] Failed to dump the code" << endl;
             throw invalid_argument("Failed to dump the code");
         }
         if (!PyBytes_Check(code_dump)){
             std::cerr << "[fatal] [main] Failed to dump the code" << endl;
+            Py_DECREF(code_dump);
             throw invalid_argument("Failed to dump the code");
         }
         encoded_code = string(PyBytes_AsString(code_dump), PyBytes_Size(code_dump));
@@ -55,8 +61,7 @@ class PyCodeConfig{
     }
 
     PyObject* compiled_code(){
-        if (encoded_code.empty()) return nullptr;
-        return PyMarshal_ReadObjectFromString(encoded_code.c_str(), encoded_code.size());
+        return unmarshal_code(encoded_code);
     }
 
     PyCodeConfig(){}
@@ -69,16 +74,27 @@ void init_control_socket(){
    control_socket = UnixClientConnection(socket_path);
 }
 
+string py_handle_packet_code;
+
 void init_handle_packet_code(){
-    py_handle_packet_code = Py_CompileStringExFlags(
-        "firegex.nfproxy.internals.handle_packet()\n", "<pyfilter>",
+    PyObject* compiled_code = Py_CompileStringExFlags(
+        "firegex.nfproxy.internals.handle_packet(globals())\n", "<pyfilter>",
         Py_file_input, NULL, 2);
-    if (py_handle_packet_code == nullptr){
-        std::cerr << "[fatal] [main] Failed to compile the utility python code (strange behaviour, probably a bug)" << endl;
-        throw invalid_argument("Failed to compile the code");
+    PyObject* code_dump = PyMarshal_WriteObjectToString(compiled_code, 4);
+    Py_DECREF(compiled_code);
+    if (code_dump == nullptr){
+        if (PyErr_Occurred())
+            PyErr_Print();
+        std::cerr << "[fatal] [main] Failed to dump the code" << endl;
+        throw invalid_argument("Failed to dump the code");
     }
+    if (!PyBytes_Check(code_dump)){
+        std::cerr << "[fatal] [main] Failed to dump the code" << endl;
+        Py_DECREF(code_dump);
+        throw invalid_argument("Failed to dump the code");
+    }
+    py_handle_packet_code = string(PyBytes_AsString(code_dump), PyBytes_Size(code_dump));
+    Py_DECREF(code_dump);
 }
 
 }}
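`init_handle_packet_code` now compiles the snippet once in the main interpreter, serializes the code object with `PyMarshal_WriteObjectToString`, and each worker thread re-reads it via `unmarshal_code`: a `PyObject*` cannot be shared directly between subinterpreters that own their GIL and allocator. The same round-trip in pure Python, for reference (the `marshal` module is the Python-level counterpart of these C APIs):

```python
# Pure-Python equivalent of the round-trip the C++ code performs with
# PyMarshal_WriteObjectToString / PyMarshal_ReadObjectFromString.
import marshal

src = "firegex.nfproxy.internals.handle_packet(globals())\n"
code = compile(src, "<pyfilter>", "exec")

encoded = marshal.dumps(code, 4)   # marshal version 4, as in the diff
restored = marshal.loads(encoded)  # safe to perform in another (sub)interpreter

assert restored.co_filename == "<pyfilter>"
```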
@@ -55,11 +55,15 @@ typedef Tins::TCPIP::StreamIdentifier stream_id;
 struct pyfilter_ctx {
 
     PyObject * glob = nullptr;
+    PyObject * py_handle_packet = nullptr;
 
-    pyfilter_ctx(PyObject * compiled_code){
+    pyfilter_ctx(PyObject * compiled_code, PyObject * handle_packet_code){
+        py_handle_packet = handle_packet_code;
+        Py_INCREF(py_handle_packet);
         glob = PyDict_New();
         PyObject* result = PyEval_EvalCode(compiled_code, glob, glob);
-        if (!result){
+        Py_XDECREF(compiled_code);
+        if (PyErr_Occurred()){
             PyErr_Print();
             Py_XDECREF(glob);
             std::cerr << "[fatal] [main] Failed to compile the code" << endl;
@@ -69,7 +73,10 @@ struct pyfilter_ctx {
     }
 
     ~pyfilter_ctx(){
+        cerr << "[info] [pyfilter_ctx] Cleaning pyfilter_ctx" << endl;
         Py_DECREF(glob);
+        Py_DECREF(py_handle_packet);
+        PyGC_Collect();
     }
 
     inline void set_item_to_glob(const char* key, PyObject* value){
@@ -82,14 +89,16 @@ struct pyfilter_ctx {
 
     void del_item_from_glob(const char* key){
         if (PyDict_DelItemString(glob, key) != 0){
-            PyErr_Print();
+            if (PyErr_Occurred())
+                PyErr_Print();
             throw invalid_argument("Failed to delete item from dict");
         }
     }
 
     inline void set_item_to_dict(PyObject* dict, const char* key, PyObject* value){
         if (PyDict_SetItemString(dict, key, value) != 0){
-            PyErr_Print();
+            if (PyErr_Occurred())
+                PyErr_Print();
             throw invalid_argument("Failed to set item to dict");
         }
         Py_DECREF(value);
@@ -111,11 +120,18 @@ struct pyfilter_ctx {
 
         // Set packet info to the global context
        set_item_to_glob("__firegex_packet_info", packet_info);
-        PyObject * result = PyEval_EvalCode(py_handle_packet_code, glob, glob);
+        #ifdef DEBUG
+        cerr << "[DEBUG] [handle_packet] Calling python with a data of " << data.size() << endl;
+        #endif
+        PyObject * result = PyEval_EvalCode(py_handle_packet, glob, glob);
+        PyGC_Collect();
+        #ifdef DEBUG
+        cerr << "[DEBUG] [handle_packet] End of python call" << endl;
+        #endif
         del_item_from_glob("__firegex_packet_info");
 
-        Py_DECREF(packet_info);
-        if (!result){
+        if (PyErr_Occurred()){
+            cerr << "[error] [handle_packet] Failed to execute the code " << result << endl;
             PyErr_Print();
         #ifdef DEBUG
             cerr << "[DEBUG] [handle_packet] Exception raised" << endl;
@@ -134,7 +150,9 @@ struct pyfilter_ctx {
         }
 
         if (!PyDict_Check(result)){
-            PyErr_Print();
+            if (PyErr_Occurred()){
+                PyErr_Print();
+            }
         #ifdef DEBUG
             cerr << "[DEBUG] [handle_packet] Result is not a dict" << endl;
         #endif
@@ -78,7 +78,7 @@ class FiregexInterceptor:
             await run_func(self.outstrem_function, self.srv.id, line)
 
     async def _start_binary(self):
-        proxy_binary_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),"../cpproxy")
+        proxy_binary_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../cpproxy"))
         self.process = await asyncio.create_subprocess_exec(
             proxy_binary_path, stdin=asyncio.subprocess.DEVNULL,
             stdout=asyncio.subprocess.PIPE,
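The one-line change above is a normalization fix, not a cosmetic one: `abspath()` applied to the already-joined path collapses the `../` component, while the old order left it embedded in the string handed to `create_subprocess_exec`. A quick comparison (the paths here are made up for the demo):

```python
# Comparison of the two constructions on a sample directory.
import os.path

module_dir = "/opt/firegex/modules"
old_style = os.path.join(module_dir, "../cpproxy")
new_style = os.path.abspath(os.path.join(module_dir, "../cpproxy"))
print(old_style)  # /opt/firegex/modules/../cpproxy
print(new_style)  # /opt/firegex/cpproxy
```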
@@ -1,23 +1,11 @@
 import functools
-from firegex.nfproxy.params import RawPacket
-from enum import Enum
+from firegex.nfproxy.models import RawPacket, TCPInputStream, TCPOutputStream, TCPClientStream, TCPServerStream, TCPStreams
+from firegex.nfproxy.internals.models import Action, FullStreamAction
 
-class Action(Enum):
-    ACCEPT = 0
-    DROP = 1
-    REJECT = 2
-    MANGLE = 3
-
-class FullStreamAction(Enum):
-    FLUSH = 0
-    ACCEPT = 1
-    REJECT = 2
-    DROP = 3
-
 ACCEPT = Action.ACCEPT
 DROP = Action.DROP
 REJECT = Action.REJECT
-MANGLE = Action.MANGLE
+UNSTABLE_MANGLE = Action.MANGLE
 
 def pyfilter(func):
     """
@@ -45,8 +33,7 @@ def clear_pyfilter_registry():
     pyfilter.registry.clear()
 
 __all__ = [
-    "ACCEPT", "DROP", "REJECT", "MANGLE", "EXCEPTION", "INVALID",
-    "Action", "FullStreamAction",
-    "pyfilter",
-    "RawPacket"
+    "ACCEPT", "DROP", "REJECT", "UNSTABLE_MANGLE",
+    "Action", "FullStreamAction", "pyfilter",
+    "RawPacket", "TCPInputStream", "TCPOutputStream", "TCPClientStream", "TCPServerStream", "TCPStreams"
 ]
@@ -1,60 +1,23 @@
 from inspect import signature
-from firegex.nfproxy.params import RawPacket, NotReadyToRun
-from firegex.nfproxy import Action, FullStreamAction
-from dataclasses import dataclass, field
+from firegex.nfproxy.internals.models import Action, FullStreamAction
+from firegex.nfproxy.internals.models import FilterHandler, PacketHandlerResult
+import functools
+from firegex.nfproxy.internals.data import DataStreamCtx
+from firegex.nfproxy.internals.exceptions import NotReadyToRun
+from firegex.nfproxy.internals.data import RawPacket
 
-type_annotations_associations = {
-    "tcp": {
-        RawPacket: RawPacket.fetch_from_global
-    },
-    "http": {
-        RawPacket: RawPacket.fetch_from_global
-    }
-}
-
-@dataclass
-class FilterHandler:
-    func: callable
-    name: str
-    params: dict[type, callable]
-    proto: str
-
-class internal_data:
-    filter_call_info: list[FilterHandler] = []
-    stream: list[RawPacket] = []
-    stream_size: int = 0
-    stream_max_size: int = 1*8e20
-    full_stream_action: str = "flush"
-    filter_glob: dict = {}
-
-@dataclass
-class PacketHandlerResult:
-    glob: dict = field(repr=False)
-    action: Action = Action.ACCEPT
-    matched_by: str = None
-    mangled_packet: bytes = None
-
-    def set_result(self) -> None:
-        self.glob["__firegex_pyfilter_result"] = {
-            "action": self.action.value,
-            "matched_by": self.matched_by,
-            "mangled_packet": self.mangled_packet
-        }
-
-    def reset_result(self) -> None:
-        self.glob["__firegex_pyfilter_result"] = None
-
-def context_call(func, *args, **kargs):
-    internal_data.filter_glob["__firegex_tmp_args"] = args
-    internal_data.filter_glob["__firegex_tmp_kargs"] = kargs
-    internal_data.filter_glob["__firege_tmp_call"] = func
-    res = eval("__firege_tmp_call(*__firegex_tmp_args, **__firegex_tmp_kargs)", internal_data.filter_glob, internal_data.filter_glob)
-    del internal_data.filter_glob["__firegex_tmp_args"]
-    del internal_data.filter_glob["__firegex_tmp_kargs"]
-    del internal_data.filter_glob["__firege_tmp_call"]
+def context_call(glob, func, *args, **kargs):
+    glob["__firegex_tmp_args"] = args
+    glob["__firegex_tmp_kargs"] = kargs
+    glob["__firege_tmp_call"] = func
+    res = eval("__firege_tmp_call(*__firegex_tmp_args, **__firegex_tmp_kargs)", glob, glob)
+    del glob["__firegex_tmp_args"]
+    del glob["__firegex_tmp_kargs"]
+    del glob["__firege_tmp_call"]
     return res
 
 def generate_filter_structure(filters: list[str], proto:str, glob:dict) -> list[FilterHandler]:
+    from firegex.nfproxy.models import type_annotations_associations
     if proto not in type_annotations_associations.keys():
         raise Exception("Invalid protocol")
     res = []
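`context_call` now takes the filter globals explicitly instead of reaching for the old module-level `internal_data` singleton. A standalone sketch of the same trampoline (this version adds a `try/finally` the diff does not have, purely to keep the demo reentrant):

```python
# The filter function is evaluated *inside* the filter's own globals, so it
# sees the same module state the user's code was exec'd with.
def context_call(glob: dict, func, *args, **kargs):
    glob["__firegex_tmp_args"] = args
    glob["__firegex_tmp_kargs"] = kargs
    glob["__firege_tmp_call"] = func
    try:
        return eval("__firege_tmp_call(*__firegex_tmp_args, **__firegex_tmp_kargs)", glob, glob)
    finally:
        del glob["__firegex_tmp_args"], glob["__firegex_tmp_kargs"], glob["__firege_tmp_call"]

glob = {}
exec("SEEN = []\ndef f(x):\n    SEEN.append(x)\n    return x * 2", glob, glob)
assert context_call(glob, glob["f"], 21) == 42 and glob["SEEN"] == [21]
```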
@@ -103,22 +66,27 @@ def get_filters_info(code:str, proto:str) -> list[FilterHandler]:
 def get_filter_names(code:str, proto:str) -> list[str]:
     return [ele.name for ele in get_filters_info(code, proto)]
 
-def handle_packet() -> None:
+def handle_packet(glob: dict) -> None:
+    internal_data = DataStreamCtx(glob)
+    print("I'm here", flush=True)
     cache_call = {} # Cache of the data handler calls
 
-    pkt_info = RawPacket.fetch_from_global(internal_data.filter_glob)
+    pkt_info = RawPacket._fetch_packet(internal_data)
+    internal_data.current_pkt = pkt_info
     cache_call[RawPacket] = pkt_info
 
     final_result = Action.ACCEPT
     data_size = len(pkt_info.data)
 
-    result = PacketHandlerResult(internal_data.filter_glob)
+    result = PacketHandlerResult(glob)
 
     if internal_data.stream_size+data_size > internal_data.stream_max_size:
         match internal_data.full_stream_action:
             case FullStreamAction.FLUSH:
                 internal_data.stream = []
                 internal_data.stream_size = 0
+                for func in internal_data.flush_action_set:
+                    func()
             case FullStreamAction.ACCEPT:
                 result.action = Action.ACCEPT
                 return result.set_result()
@@ -138,17 +106,19 @@ def handle_packet() -> None:
     mangled_packet = None
     for filter in internal_data.filter_call_info:
         final_params = []
+        skip_call = False
         for data_type, data_func in filter.params.items():
             if data_type not in cache_call.keys():
                 try:
-                    cache_call[data_type] = data_func(internal_data.filter_glob)
+                    cache_call[data_type] = data_func(internal_data)
                 except NotReadyToRun:
                     cache_call[data_type] = None
-            if cache_call[data_type] is None:
-                continue # Parsing raised NotReadyToRun, skip filter
+                    skip_call = True
+                    break
             final_params.append(cache_call[data_type])
-        res = context_call(filter.func, *final_params)
+        if skip_call:
+            continue
+        res = context_call(glob, filter.func, *final_params)
 
         if res is None:
             continue #ACCEPTED
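The rewritten loop constructs each data model at most once per packet (the `cache_call` dict) and, when a parser raises `NotReadyToRun`, marks the filter to be skipped via `skip_call`/`break` rather than calling it with missing parameters. A reduced, runnable model of that control flow:

```python
# Reduced model of the parameter-resolution loop in handle_packet().
class NotReadyToRun(Exception):
    pass

def needs_two_packets(state):
    if state["packets"] < 2:
        raise NotReadyToRun()
    return f"stream of {state['packets']} packets"

def run_filters(filters, state):
    cache = {}
    for func, fetchers in filters:
        final_params, skip_call = [], False
        for fetch in fetchers:
            if fetch not in cache:
                try:
                    cache[fetch] = fetch(state)
                except NotReadyToRun:
                    cache[fetch] = None
                    skip_call = True
                    break
            final_params.append(cache[fetch])
        if skip_call:
            continue  # this filter is skipped for this packet
        print(func(*final_params))

run_filters([(lambda s: f"got {s}", [needs_two_packets])], {"packets": 1})  # skipped
run_filters([(lambda s: f"got {s}", [needs_two_packets])], {"packets": 3})  # runs
```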
@@ -168,8 +138,10 @@ def handle_packet() -> None:
     return result.set_result()
 
 
 def compile(glob:dict) -> None:
-    internal_data.filter_glob = glob
+    internal_data = DataStreamCtx(glob)
 
+    glob["print"] = functools.partial(print, flush = True)
+
     filters = glob["__firegex_pyfilter_enabled"]
     proto = glob["__firegex_proto"]
@@ -187,3 +159,4 @@ def compile(glob:dict) -> None:
     internal_data.full_stream_action = FullStreamAction.FLUSH
 
     PacketHandlerResult(glob).reset_result()
+
fgex-lib/firegex/nfproxy/internals/data.py (new file, 190 lines)
@@ -0,0 +1,190 @@
+from firegex.nfproxy.internals.models import FilterHandler
+from typing import Callable
+
+class RawPacket:
+    """
+    class representation of the nfqueue packet sent in this context by the c++ core
+    """
+
+    def __init__(self,
+            data: bytes,
+            raw_packet: bytes,
+            is_input: bool,
+            is_ipv6: bool,
+            is_tcp: bool,
+            l4_size: int,
+        ):
+        self.__data = bytes(data)
+        self.__raw_packet = bytes(raw_packet)
+        self.__is_input = bool(is_input)
+        self.__is_ipv6 = bool(is_ipv6)
+        self.__is_tcp = bool(is_tcp)
+        self.__l4_size = int(l4_size)
+        self.__raw_packet_header_size = len(self.__raw_packet)-self.__l4_size
+
+    @property
+    def is_input(self) -> bool:
+        return self.__is_input
+
+    @property
+    def is_ipv6(self) -> bool:
+        return self.__is_ipv6
+
+    @property
+    def is_tcp(self) -> bool:
+        return self.__is_tcp
+
+    @property
+    def data(self) -> bytes:
+        return self.__data
+
+    @property
+    def l4_size(self) -> int:
+        return self.__l4_size
+
+    @property
+    def raw_packet_header_len(self) -> int:
+        return self.__raw_packet_header_size
+
+    @property
+    def l4_data(self) -> bytes:
+        return self.__raw_packet[self.raw_packet_header_len:]
+
+    @l4_data.setter
+    def l4_data(self, v:bytes):
+        if not isinstance(v, bytes):
+            raise Exception("Invalid data type, data MUST be of type bytes")
+        #if len(v) != self.__l4_size:
+        #    raise Exception("Invalid data size, must be equal to the original packet header size (due to a technical limitation)")
+        self.__raw_packet = self.__raw_packet[:self.raw_packet_header_len]+v
+        self.__l4_size = len(v)
+
+    @property
+    def raw_packet(self) -> bytes:
+        return self.__raw_packet
+
+    @raw_packet.setter
+    def raw_packet(self, v:bytes):
+        if not isinstance(v, bytes):
+            raise Exception("Invalid data type, data MUST be of type bytes")
+        #if len(v) != len(self.__raw_packet):
+        #    raise Exception("Invalid data size, must be equal to the original packet size (due to a technical limitation)")
+        if len(v) < self.raw_packet_header_len:
+            raise Exception("Invalid data size, must be greater than the original packet header size")
+        self.__raw_packet = v
+        self.__l4_size = len(v)-self.raw_packet_header_len
+
+    @classmethod
+    def _fetch_packet(cls, internal_data):
+        from firegex.nfproxy.internals.data import DataStreamCtx
+        if not isinstance(internal_data, DataStreamCtx):
+            if isinstance(internal_data, dict):
+                internal_data = DataStreamCtx(internal_data)
+            else:
+                raise Exception("Invalid data type, data MUST be of type DataStream, or glob dict")
+
+        if "__firegex_packet_info" not in internal_data.filter_glob.keys():
+            raise Exception("Packet info not found")
+        return cls(**internal_data.filter_glob["__firegex_packet_info"])
+
+    def __repr__(self):
+        return f"RawPacket(data={self.data}, raw_packet={self.raw_packet}, is_input={self.is_input}, is_ipv6={self.is_ipv6}, is_tcp={self.is_tcp}, l4_size={self.l4_size})"
+
+
+class DataStreamCtx:
+
+    def __init__(self, glob: dict):
+        if "__firegex_pyfilter_ctx" not in glob.keys():
+            glob["__firegex_pyfilter_ctx"] = {}
+        self.__data = glob["__firegex_pyfilter_ctx"]
+        self.filter_glob = glob
+
+    @property
+    def filter_call_info(self) -> list[FilterHandler]:
+        if "filter_call_info" not in self.__data.keys():
+            self.__data["filter_call_info"] = []
+        return self.__data.get("filter_call_info")
+
+    @filter_call_info.setter
+    def filter_call_info(self, v: list[FilterHandler]):
+        self.__data["filter_call_info"] = v
+
+    @property
+    def stream(self) -> list[RawPacket]:
+        if "stream" not in self.__data.keys():
+            self.__data["stream"] = []
+        return self.__data.get("stream")
+
+    @stream.setter
+    def stream(self, v: list[RawPacket]):
+        self.__data["stream"] = v
+
+    @property
+    def stream_size(self) -> int:
+        if "stream_size" not in self.__data.keys():
+            self.__data["stream_size"] = 0
+        return self.__data.get("stream_size")
+
+    @stream_size.setter
+    def stream_size(self, v: int):
+        self.__data["stream_size"] = v
+
+    @property
+    def stream_max_size(self) -> int:
+        if "stream_max_size" not in self.__data.keys():
+            self.__data["stream_max_size"] = 1*8e20
+        return self.__data.get("stream_max_size")
+
+    @stream_max_size.setter
+    def stream_max_size(self, v: int):
+        self.__data["stream_max_size"] = v
+
+    @property
+    def full_stream_action(self) -> str:
+        if "full_stream_action" not in self.__data.keys():
+            self.__data["full_stream_action"] = "flush"
+        return self.__data.get("full_stream_action")
+
+    @full_stream_action.setter
+    def full_stream_action(self, v: str):
+        self.__data["full_stream_action"] = v
+
+    @property
+    def current_pkt(self) -> RawPacket:
+        return self.__data.get("current_pkt", None)
+
+    @current_pkt.setter
+    def current_pkt(self, v: RawPacket):
+        self.__data["current_pkt"] = v
+
+    @property
+    def http_data_objects(self) -> dict:
+        if "http_data_objects" not in self.__data.keys():
+            self.__data["http_data_objects"] = {}
+        return self.__data.get("http_data_objects")
+
+    @http_data_objects.setter
+    def http_data_objects(self, v: dict):
+        self.__data["http_data_objects"] = v
+
+    @property
+    def save_http_data_in_streams(self) -> bool:
+        if "save_http_data_in_streams" not in self.__data.keys():
+            self.__data["save_http_data_in_streams"] = False
+        return self.__data.get("save_http_data_in_streams")
+
+    @save_http_data_in_streams.setter
+    def save_http_data_in_streams(self, v: bool):
+        self.__data["save_http_data_in_streams"] = v
+
+    @property
+    def flush_action_set(self) -> set[Callable]:
+        if "flush_action_set" not in self.__data.keys():
+            self.__data["flush_action_set"] = set()
+        return self.__data.get("flush_action_set")
+
+    @flush_action_set.setter
+    def flush_action_set(self, v: set[Callable]):
+        self.__data["flush_action_set"] = v
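`DataStreamCtx` is a thin view: every property is backed by the dict stored under `__firegex_pyfilter_ctx` in the filter globals, so per-stream state survives across `handle_packet()` calls (and across view objects) without module-level mutable state. The pattern in miniature:

```python
# Minimal sketch of the DataStreamCtx pattern: properties backed by a dict
# that lives in the filter's globals, so state outlives the view object.
class StreamCtx:
    def __init__(self, glob: dict):
        self._data = glob.setdefault("__firegex_pyfilter_ctx", {})

    @property
    def stream_size(self) -> int:
        return self._data.setdefault("stream_size", 0)

    @stream_size.setter
    def stream_size(self, v: int):
        self._data["stream_size"] = v

glob = {}
StreamCtx(glob).stream_size = 10
assert StreamCtx(glob).stream_size == 10  # a fresh view sees the same state
```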
fgex-lib/firegex/nfproxy/internals/exceptions.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+
+class NotReadyToRun(Exception):
+    "raise this exception if the stream state is not ready to parse this object, the call will be skipped"
fgex-lib/firegex/nfproxy/internals/models.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+from dataclasses import dataclass, field
+from enum import Enum
+
+class Action(Enum):
+    ACCEPT = 0
+    DROP = 1
+    REJECT = 2
+    MANGLE = 3
+
+class FullStreamAction(Enum):
+    FLUSH = 0
+    ACCEPT = 1
+    REJECT = 2
+    DROP = 3
+
+@dataclass
+class FilterHandler:
+    func: callable
+    name: str
+    params: dict[type, callable]
+    proto: str
+
+@dataclass
+class PacketHandlerResult:
+    glob: dict = field(repr=False)
+    action: Action = Action.ACCEPT
+    matched_by: str = None
+    mangled_packet: bytes = None
+
+    def set_result(self) -> None:
+        self.glob["__firegex_pyfilter_result"] = {
+            "action": self.action.value,
+            "matched_by": self.matched_by,
+            "mangled_packet": self.mangled_packet
+        }
+
+    def reset_result(self) -> None:
+        self.glob["__firegex_pyfilter_result"] = None
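`PacketHandlerResult` is the Python half of the verdict handshake: `set_result()` drops a plain dict under `__firegex_pyfilter_result` in the shared globals, where the C++ side reads it after `PyEval_EvalCode` returns. Usage sketch (runnable once the fgex library is installed; the names are exactly those defined above):

```python
from firegex.nfproxy.internals.models import Action, PacketHandlerResult

glob = {}  # stands in for the filter globals the C++ side passes around
res = PacketHandlerResult(glob, action=Action.REJECT, matched_by="my_filter")
res.set_result()
assert glob["__firegex_pyfilter_result"] == {
    "action": Action.REJECT.value,  # 2
    "matched_by": "my_filter",
    "mangled_packet": None,
}
```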
fgex-lib/firegex/nfproxy/models/__init__.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+from firegex.nfproxy.models.tcp import TCPInputStream, TCPOutputStream, TCPClientStream, TCPServerStream, TCPStreams
+from firegex.nfproxy.models.http import HttpRequest, HttpResponse, HttpRequestHeader, HttpResponseHeader
+from firegex.nfproxy.internals.data import RawPacket
+
+type_annotations_associations = {
+    "tcp": {
+        RawPacket: RawPacket._fetch_packet,
+        TCPInputStream: TCPInputStream._fetch_packet,
+        TCPOutputStream: TCPOutputStream._fetch_packet,
+        TCPStreams: TCPStreams._fetch_packet,
+    },
+    "http": {
+        RawPacket: RawPacket._fetch_packet,
+        TCPInputStream: TCPInputStream._fetch_packet,
+        TCPOutputStream: TCPOutputStream._fetch_packet,
+        TCPStreams: TCPStreams._fetch_packet,
+        HttpRequest: HttpRequest._fetch_packet,
+        HttpResponse: HttpResponse._fetch_packet,
+        HttpRequestHeader: HttpRequestHeader._fetch_packet,
+        HttpResponseHeader: HttpResponseHeader._fetch_packet,
+    }
+}
+
+__all__ = [
+    "RawPacket",
+    "TCPInputStream", "TCPOutputStream", "TCPClientStream", "TCPServerStream", "TCPStreams",
+    "HttpRequest", "HttpResponse", "HttpRequestHeader", "HttpResponseHeader",
+]
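`type_annotations_associations` is what turns a filter's type hints into injected arguments: `generate_filter_structure` inspects the function signature and records one fetcher per annotated parameter. A reduced model of the dispatch (the table and classes below are stand-ins, not the real models):

```python
# Reduced model of annotation-driven parameter injection.
from inspect import signature

class RawPacket: ...
class TCPStream: ...

table = {
    "tcp": {
        RawPacket: lambda state: RawPacket(),
        TCPStream: lambda state: TCPStream(),
    }
}

def build_params(func, proto, state):
    fetchers = table[proto]
    return [
        fetchers[p.annotation](state)
        for p in signature(func).parameters.values()
        if p.annotation in fetchers
    ]

def my_filter(pkt: RawPacket, stream: TCPStream):
    return type(pkt).__name__, type(stream).__name__

print(my_filter(*build_params(my_filter, "tcp", state={})))  # ('RawPacket', 'TCPStream')
```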
fgex-lib/firegex/nfproxy/models/http.py (new file, 385 lines)
@@ -0,0 +1,385 @@
+import pyllhttp
+from firegex.nfproxy.internals.exceptions import NotReadyToRun
+from firegex.nfproxy.internals.data import DataStreamCtx
+
+class InternalCallbackHandler():
+
+    url: str|None = None
+    _url_buffer: bytes = b""
+    headers: dict[str, str] = {}
+    _header_fields: dict[bytes, bytes] = {}
+    has_begun: bool = False
+    body: bytes = None
+    _body_buffer: bytes = b""
+    headers_complete: bool = False
+    message_complete: bool = False
+    status: str|None = None
+    _status_buffer: bytes = b""
+    current_header_field = None
+    current_header_value = None
+    _save_body = True
+
+    def on_message_begin(self):
+        self.has_begun = True
+
+    def on_url(self, url):
+        self._url_buffer += url
+
+    def on_url_complete(self):
+        self.url = self._url_buffer.decode(errors="ignore")
+        self._url_buffer = None
+
+    def on_header_field(self, field):
+        if self.current_header_field is None:
+            self.current_header_field = bytearray(field)
+        else:
+            self.current_header_field += field
+
+    def on_header_field_complete(self):
+        self.current_header_field = self.current_header_field
+
+    def on_header_value(self, value):
+        if self.current_header_value is None:
+            self.current_header_value = bytearray(value)
+        else:
+            self.current_header_value += value
+
+    def on_header_value_complete(self):
+        if self.current_header_value is not None and self.current_header_field is not None:
+            self._header_fields[self.current_header_field.decode(errors="ignore")] = self.current_header_value.decode(errors="ignore")
+        self.current_header_field = None
+        self.current_header_value = None
+
+    def on_headers_complete(self):
+        self.headers_complete = True
+        self.headers = self._header_fields
+        self._header_fields = {}
+        self.current_header_field = None
+        self.current_header_value = None
+
+    def on_body(self, body: bytes):
+        if self._save_body:
+            self._body_buffer += body
+
+    def on_message_complete(self):
+        self.body = self._body_buffer
+        self._body_buffer = b""
+        self.message_complete = True
+
+    def on_status(self, status: bytes):
+        self._status_buffer += status
+
+    def on_status_complete(self):
+        self.status = self._status_buffer.decode(errors="ignore")
+        self._status_buffer = b""
+
+    @property
+    def keep_alive(self) -> bool:
+        return self.should_keep_alive
+
+    @property
+    def should_upgrade(self) -> bool:
+        return self.is_upgrading
+
+    @property
+    def http_version(self) -> str:
+        return f"{self.major}.{self.minor}"
+
+    @property
+    def method_parsed(self) -> str:
+        return self.method.decode(errors="ignore")
+
+    @property
+    def content_length_parsed(self) -> int:
+        return self.content_length
+
+
+class InternalHttpRequest(InternalCallbackHandler, pyllhttp.Request):
+    def __init__(self):
+        super(pyllhttp.Request, self).__init__()
+        super(InternalCallbackHandler, self).__init__()
+
+class InternalHttpResponse(InternalCallbackHandler, pyllhttp.Response):
+    def __init__(self):
+        super(pyllhttp.Response, self).__init__()
+        super(InternalCallbackHandler, self).__init__()
+
+class InternalBasicHttpMetaClass:
+
+    def __init__(self):
+        self._parser: InternalHttpRequest|InternalHttpResponse
+        self._headers_were_set = False
+        self.stream = b""
+        self.raised_error = False
+
+    @property
+    def url(self) -> str|None:
+        return self._parser.url
+
+    @property
+    def headers(self) -> dict[str, str]:
+        return self._parser.headers
+
+    @property
+    def has_begun(self) -> bool:
+        return self._parser.has_begun
+
+    @property
+    def body(self) -> bytes:
+        return self._parser.body
+
+    @property
+    def headers_complete(self) -> bool:
+        return self._parser.headers_complete
+
+    @property
+    def message_complete(self) -> bool:
+        return self._parser.message_complete
+
+    @property
+    def http_version(self) -> str:
+        return self._parser.http_version
+
+    @property
+    def keep_alive(self) -> bool:
+        return self._parser.keep_alive
+
+    @property
+    def should_upgrade(self) -> bool:
+        return self._parser.should_upgrade
+
+    @property
+    def content_length(self) -> int|None:
+        return self._parser.content_length_parsed
+
+    @property
+    def method(self) -> str|None:
+        return self._parser.method_parsed
+
+    def _fetch_current_packet(self, internal_data: DataStreamCtx):
+        # TODO: if an error is triggered should I reject the connection?
+        if internal_data.save_http_data_in_streams: # This is a websocket upgrade!
+            self.stream += internal_data.current_pkt.data
+        else:
+            try:
+                self._parser.execute(internal_data.current_pkt.data)
+                if self._parser.headers_complete and len(self._parser._body_buffer) == self._parser.content_length_parsed:
+                    self._parser.on_message_complete()
+            except Exception as e:
+                self.raised_error = True
+                raise e
+
+    #It's called the first time if the headers are complete, and second time with body complete
+    def _callable_checks(self, internal_data: DataStreamCtx):
+        if self._parser.headers_complete and not self._headers_were_set:
+            self._headers_were_set = True
+            return True
+        return self._parser.message_complete or internal_data.save_http_data_in_streams
+
+    def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
+        return True
+
+    def _trigger_remove_data(self, internal_data: DataStreamCtx):
+        return self.message_complete
+
+    @classmethod
+    def _fetch_packet(cls, internal_data: DataStreamCtx):
+        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False:
+            raise NotReadyToRun()
+
+        datahandler:InternalBasicHttpMetaClass = internal_data.http_data_objects.get(cls, None)
+        if datahandler is None or datahandler.raised_error:
+            datahandler = cls()
+            internal_data.http_data_objects[cls] = datahandler
+
+        if not datahandler._before_fetch_callable_checks(internal_data):
+            raise NotReadyToRun()
+        datahandler._fetch_current_packet(internal_data)
+        if not datahandler._callable_checks(internal_data):
+            raise NotReadyToRun()
+
+        if datahandler.should_upgrade:
+            internal_data.save_http_data_in_streams = True
+
+        if datahandler._trigger_remove_data(internal_data):
+            if internal_data.http_data_objects.get(cls):
+                del internal_data.http_data_objects[cls]
+
+        return datahandler
+
+class HttpRequest(InternalBasicHttpMetaClass):
+    def __init__(self):
+        super().__init__()
+        # These will be used in the metaclass
+        self._parser: InternalHttpRequest = InternalHttpRequest()
+        self._headers_were_set = False
+
+    @property
+    def method(self) -> bytes:
+        return self._parser.method
+
+    def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
+        return internal_data.current_pkt.is_input
+
+    def __repr__(self):
+        return f"<HttpRequest method={self.method} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} has_begun={self.has_begun} content_length={self.content_length} stream={self.stream}>"
+
+class HttpResponse(InternalBasicHttpMetaClass):
+    def __init__(self):
+        super().__init__()
+        self._parser: InternalHttpResponse = InternalHttpResponse()
+        self._headers_were_set = False
+
+    @property
+    def status_code(self) -> int:
+        return self._parser.status
+
+    def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
+        return not internal_data.current_pkt.is_input
+
+    def __repr__(self):
+        return f"<HttpResponse status_code={self.status_code} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} has_begun={self.has_begun} content_length={self.content_length} stream={self.stream}>"
+
+class HttpRequestHeader(HttpRequest):
+    def __init__(self):
+        super().__init__()
+        self._parser._save_body = False
+
+    def _callable_checks(self, internal_data: DataStreamCtx):
+        if self._parser.headers_complete and not self._headers_were_set:
+            self._headers_were_set = True
+            return True
+        return False
+
+class HttpResponseHeader(HttpResponse):
+    def __init__(self):
+        super().__init__()
+        self._parser._save_body = False
+
+    def _callable_checks(self, internal_data: DataStreamCtx):
+        if self._parser.headers_complete and not self._headers_were_set:
+            self._headers_were_set = True
+            return True
+        return False
+
+"""
+#TODO include this?
+
+import codecs
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+def guess_json_utf(data):
+    ""
+    :rtype: str
+    ""
+    # JSON always starts with two ASCII characters, so detection is as
+    # easy as counting the nulls and from their location and count
+    # determine the encoding. Also detect a BOM, if present.
+    sample = data[:4]
+    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+        return "utf-32"  # BOM included
+    if sample[:3] == codecs.BOM_UTF8:
+        return "utf-8-sig"  # BOM included, MS style (discouraged)
+    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+        return "utf-16"  # BOM included
+    nullcount = sample.count(_null)
+    if nullcount == 0:
+        return "utf-8"
+    if nullcount == 2:
+        if sample[::2] == _null2:  # 1st and 3rd are null
+            return "utf-16-be"
+        if sample[1::2] == _null2:  # 2nd and 4th are null
+            return "utf-16-le"
+        # Did not detect 2 valid UTF-16 ascii-range characters
+    if nullcount == 3:
+        if sample[:3] == _null3:
+            return "utf-32-be"
+        if sample[1:] == _null3:
+            return "utf-32-le"
+        # Did not detect a valid UTF-32 ascii-range character
+    return None
+
+from http_parser.pyparser import HttpParser
+import json
+from urllib.parse import parse_qsl
+from dataclasses import dataclass
+
+@dataclass
+class HttpMessage():
+    fragment: str
+    headers: dict
+    method: str
+    parameters: dict
+    path: str
+    query_string: str
+    raw_body: bytes
+    status_code: int
+    url: str
+    version: str
+
+class HttpMessageParser(HttpParser):
+    def __init__(self, data:bytes, decompress_body=True):
+        super().__init__(decompress = decompress_body)
+        self.execute(data, len(data))
+        self._parameters = {}
+        try:
+            self._parse_parameters()
+        except Exception as e:
+            print("Error in parameters parsing:", data)
+            print("Exception:", str(e))
+
+    def get_raw_body(self):
+        return b"\r\n".join(self._body)
+
+    def _parse_query_string(self, raw_string):
+        parameters = parse_qsl(raw_string)
+        for key,value in parameters:
+            try:
+                key = key.decode()
+                value = value.decode()
+            except:
+                pass
+            if self._parameters.get(key):
+                if isinstance(self._parameters[key], list):
+                    self._parameters[key].append(value)
+                else:
+                    self._parameters[key] = [self._parameters[key], value]
+            else:
+                self._parameters[key] = value
+
+    def _parse_parameters(self):
+        if self._method == "POST":
+            body = self.get_raw_body()
+            if len(body) == 0:
+                return
+            content_type = self.get_headers().get("Content-Type")
+            if not content_type or "x-www-form-urlencoded" in content_type:
+                try:
+                    self._parse_query_string(body.decode())
+                except:
+                    pass
+            elif "json" in content_type:
+                self._parameters = json.loads(body)
+        elif self._method == "GET":
+            self._parse_query_string(self._query_string)
+
+    def get_parameters(self):
+        ""returns parameters parsed from query string or body""
+        return self._parameters
+
+    def get_version(self):
+        if self._version:
+            return ".".join([str(x) for x in self._version])
+        return None
+
+    def to_message(self):
+        return HttpMessage(self._fragment, self._headers, self._method,
+            self._parameters, self._path, self._query_string,
+            self.get_raw_body(), self._status_code,
+            self._url, self.get_version()
+        )
+"""
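The `InternalCallbackHandler` above leans on one llhttp contract worth spelling out: header field and value bytes may arrive split across several callbacks, so they must be buffered until the corresponding `*_complete` callback fires. A self-contained model of just that accumulation logic (no pyllhttp required):

```python
# Self-contained model of the header-accumulation contract the handler
# implements: chunks are buffered until the matching *_complete callback.
class HeaderCollector:
    def __init__(self):
        self.headers = {}
        self._field = None
        self._value = None

    def on_header_field(self, chunk: bytes):
        self._field = (self._field or bytearray()) + chunk

    def on_header_value(self, chunk: bytes):
        self._value = (self._value or bytearray()) + chunk

    def on_header_value_complete(self):
        if self._field is not None and self._value is not None:
            self.headers[self._field.decode(errors="ignore")] = self._value.decode(errors="ignore")
        self._field = self._value = None

h = HeaderCollector()
h.on_header_field(b"Content-")
h.on_header_field(b"Type")      # field name split across two callbacks
h.on_header_value(b"text/html")
h.on_header_value_complete()
assert h.headers == {"Content-Type": "text/html"}
```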
fgex-lib/firegex/nfproxy/models/tcp.py (new file, 113 lines)
@@ -0,0 +1,113 @@
+from firegex.nfproxy.internals.data import DataStreamCtx
+from firegex.nfproxy.internals.exceptions import NotReadyToRun
+
+class TCPStreams:
+    """
+    This datamodel will assemble the TCP streams from the input and output data.
+    The functions that use this data model will be handled when:
+    - The packet is TCP
+    - At least 1 packet has been sent
+    """
+
+    def __init__(self,
+            input_data: bytes,
+            output_data: bytes,
+            is_ipv6: bool,
+        ):
+        self.__input_data = bytes(input_data)
+        self.__output_data = bytes(output_data)
+        self.__is_ipv6 = bool(is_ipv6)
+
+    @property
+    def input_data(self) -> bytes:
+        return self.__input_data
+
+    @property
+    def output_data(self) -> bytes:
+        return self.__output_data
+
+    @property
+    def is_ipv6(self) -> bool:
+        return self.__is_ipv6
+
+    @classmethod
+    def _fetch_packet(cls, internal_data:DataStreamCtx):
+
+        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False:
+            raise NotReadyToRun()
+        return cls(
+            input_data=b"".join([ele.data for ele in internal_data.stream if ele.is_input]),
+            output_data=b"".join([ele.data for ele in internal_data.stream if not ele.is_input]),
+            is_ipv6=internal_data.current_pkt.is_ipv6,
+        )
+
+
+class TCPInputStream:
+    """
+    This datamodel will assemble the TCP input stream from the client sent data.
+    The functions that use this data model will be handled when:
+    - The packet is TCP
+    - At least 1 packet has been sent
+    - A new client packet has been received
+    """
+    def __init__(self,
+            data: bytes,
+            is_ipv6: bool,
+        ):
+        self.__data = bytes(data)
+        self.__is_ipv6 = bool(is_ipv6)
+
+    @property
+    def data(self) -> bytes:
+        return self.__data
+
+    @property
+    def is_ipv6(self) -> bool:
+        return self.__is_ipv6
+
+    @classmethod
+    def _fetch_packet(cls, internal_data:DataStreamCtx):
+        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False or internal_data.current_pkt.is_input is False:
+            raise NotReadyToRun()
+        return cls(
+            data=internal_data.current_pkt.get_related_raw_stream(),
+            is_ipv6=internal_data.current_pkt.is_ipv6,
+        )
+
+TCPClientStream = TCPInputStream
+
+class TCPOutputStream:
+    """
+    This datamodel will assemble the TCP output stream from the server sent data.
+    The functions that use this data model will be handled when:
+    - The packet is TCP
+    - At least 1 packet has been sent
+    - A new server packet has been sent
+    """
+
+    def __init__(self,
+            data: bytes,
+            is_ipv6: bool,
+        ):
+        self.__data = bytes(data)
+        self.__is_ipv6 = bool(is_ipv6)
+
+    @property
+    def data(self) -> bytes:
+        return self.__data
+
+    @property
+    def is_ipv6(self) -> bool:
+        return self.__is_ipv6
+
+    @classmethod
+    def _fetch_packet(cls, internal_data:DataStreamCtx):
+        if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False or internal_data.current_pkt.is_input is True:
+            raise NotReadyToRun()
+        return cls(
+            data=internal_data.current_pkt.get_related_raw_stream(),
+            is_ipv6=internal_data.current_pkt.is_ipv6,
+        )
+
+TCPServerStream = TCPOutputStream
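`TCPStreams._fetch_packet` rebuilds both directions from the buffered per-stream packet list. The reassembly itself is a one-liner per direction, shown here as a reduced, runnable model:

```python
# Reduced model of TCPStreams._fetch_packet: split the buffered packet list
# by direction and join each side into a contiguous byte stream.
class Pkt:
    def __init__(self, data: bytes, is_input: bool):
        self.data, self.is_input = data, is_input

def assemble(stream):
    input_data = b"".join(p.data for p in stream if p.is_input)
    output_data = b"".join(p.data for p in stream if not p.is_input)
    return input_data, output_data

stream = [
    Pkt(b"GET / HT", True),
    Pkt(b"TP/1.1\r\n", True),
    Pkt(b"HTTP/1.1 200 OK\r\n", False),
]
assert assemble(stream) == (b"GET / HTTP/1.1\r\n", b"HTTP/1.1 200 OK\r\n")
```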
@@ -1,79 +0,0 @@
class NotReadyToRun(Exception): # raise this exception if the stream state is not ready to parse this object, the call will be skipped
    pass

class RawPacket:
    def __init__(self,
        data: bytes,
        raw_packet: bytes,
        is_input: bool,
        is_ipv6: bool,
        is_tcp: bool,
        l4_size: int,
    ):
        self.__data = bytes(data)
        self.__raw_packet = bytes(raw_packet)
        self.__is_input = bool(is_input)
        self.__is_ipv6 = bool(is_ipv6)
        self.__is_tcp = bool(is_tcp)
        self.__l4_size = int(l4_size)
        self.__raw_packet_header_size = len(self.__raw_packet)-self.__l4_size

    @property
    def is_input(self) -> bool:
        return self.__is_input

    @property
    def is_ipv6(self) -> bool:
        return self.__is_ipv6

    @property
    def is_tcp(self) -> bool:
        return self.__is_tcp

    @property
    def data(self) -> bytes:
        return self.__data

    @property
    def l4_size(self) -> int:
        return self.__l4_size

    @property
    def raw_packet_header_len(self) -> int:
        return self.__raw_packet_header_size

    @property
    def l4_data(self) -> bytes:
        return self.__raw_packet[self.raw_packet_header_len:]

    @l4_data.setter
    def l4_data(self, v:bytes):
        if not isinstance(v, bytes):
            raise Exception("Invalid data type, data MUST be of type bytes")
        #if len(v) != self.__l4_size:
        #    raise Exception("Invalid data size, must be equal to the original packet header size (due to a technical limitation)")
        self.__raw_packet = self.__raw_packet[:self.raw_packet_header_len]+v
        self.__l4_size = len(v)

    @property
    def raw_packet(self) -> bytes:
        return self.__raw_packet

    @raw_packet.setter
    def raw_packet(self, v:bytes):
        if not isinstance(v, bytes):
            raise Exception("Invalid data type, data MUST be of type bytes")
        #if len(v) != len(self.__raw_packet):
        #    raise Exception("Invalid data size, must be equal to the original packet size (due to a technical limitation)")
        if len(v) < self.raw_packet_header_len:
            raise Exception("Invalid data size, must be at least the original packet header size")
        self.__raw_packet = v
        self.__l4_size = len(v)-self.raw_packet_header_len

    @classmethod
    def fetch_from_global(cls, glob):
        if "__firegex_packet_info" not in glob.keys():
            raise Exception("Packet info not found")
        return cls(**glob["__firegex_packet_info"])
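A small illustration of the contract this removed class implemented: the proxy was expected to place the constructor kwargs under `__firegex_packet_info` in the interpreter globals, and assigning `l4_data` re-slices the raw packet while keeping `l4_size` in sync. All byte values below are made-up placeholders:

```python
# Illustration against the RawPacket class above; all byte values are fake.
glob = {
    "__firegex_packet_info": {
        "data": b"hello",                       # assembled stream data
        "raw_packet": b"\x00" * 40 + b"hello",  # 40 placeholder header bytes + payload
        "is_input": True,
        "is_ipv6": False,
        "is_tcp": True,
        "l4_size": 5,
    }
}
pkt = RawPacket.fetch_from_global(glob)
assert pkt.l4_data == b"hello"

pkt.l4_data = b"hello world"  # mangling re-slices the raw packet...
assert pkt.l4_size == 11      # ...and updates l4_size accordingly
assert pkt.raw_packet == b"\x00" * 40 + b"hello world"
```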
@@ -7,4 +7,5 @@ textual==2.1.0
 python-socketio[client]==5.12.1
 fgex
 orjson
+httptools
+pyllhttp
@@ -10,8 +10,8 @@ export const ModalLog = (
 }
 ) => {
     return <Modal size="90%" title={title} opened={opened} onClose={close} centered>
-        <ScrollArea h={500} style={{ minWidth: "100%",whiteSpace: "pre-wrap"}}>
-            <Code block mih={500}>{data}</Code>
+        <ScrollArea h={500} style={{ maxWidth: "100%",whiteSpace: "break-spaces"}} scrollbars="y">
+            <Code block mih={500} style={{ maxWidth: "100%",whiteSpace: "break-spaces"}}>{data}</Code>
         </ScrollArea>
     </Modal>
 }
@@ -2,6 +2,7 @@ import { IoIosWarning } from "react-icons/io"
 import { socketio, WARNING_NFPROXY_TIME_LIMIT } from "../../js/utils"
 import { Tooltip } from "@mantine/core"
 import { useEffect, useState } from "react"
+import { round } from "@mantine/core/lib/components/ColorPicker/converters/parsers"

 export const ExceptionWarning = ({ service_id }: { service_id: string }) => {
@@ -17,9 +18,25 @@ export const ExceptionWarning = ({ service_id }: { service_id: string }) => {
         }
     }, [])

+    const [_time, setTime] = useState(new Date());
+
+    useEffect(() => {
+        const interval = setInterval(() => {
+            setTime(new Date());
+        }, 1000);
+
+        return () => clearInterval(interval);
+    }, []);
+
+    const deltaTime = new Date().getTime()-lastExceptionTimestamp
+    const minutes = Math.floor(deltaTime/(1000*60))
+    const seconds = Math.floor(deltaTime/1000)%60
+
+    const deltaStringTime = `${minutes.toString().length>1?minutes:"0"+minutes}:${seconds.toString().length>1?seconds:"0"+seconds}`
+
     return <>
     {(new Date().getTime()-lastExceptionTimestamp <= WARNING_NFPROXY_TIME_LIMIT)?
-        <Tooltip label={`There was an exception less than ${WARNING_NFPROXY_TIME_LIMIT/(1000*60)} minutes ago: check the logs`} color="yellow">
+        <Tooltip label={`There was an exception ${deltaStringTime} ago: check the logs`} color="yellow">
             <IoIosWarning size={30} style={{ color: "yellow" }} />
         </Tooltip>
     :null}
@@ -96,3 +96,77 @@ export const nfproxy = {
         return status === "ok"?undefined:status
     }
 }
+
+export const EXAMPLE_PYFILTER = `# This is an example of a filter file for the http protocol
+
+# From here we can import the DataTypes that we want to use:
+# The data type must be specified in the filter function signatures,
+# and will also be used internally to decide when to call each filter and how to aggregate data
+from firegex.nfproxy.params import RawPacket, HTTPRequest
+
+# The global context of this execution is dedicated to a single TCP stream
+# - This code will be executed once at the TCP stream start
+# - The filter will be called for each packet in the stream
+# - You can store data you need in the global context, but storing too much data could be dangerous
+# - At the end of the stream the global context will be destroyed
+
+from firegex.nfproxy import pyfilter
+# pyfilter is a decorator: it makes the function an effective filter, and the function parameters must have a specified type
+
+from firegex.nfproxy import REJECT, ACCEPT, UNSTABLE_MANGLE, DROP
+# The filter must return one of the following values:
+# - ACCEPT: The packet will be accepted
+# - REJECT: The packet will be rejected (a mechanism will be activated to send a FIN packet and drop all further data in the stream)
+# - UNSTABLE_MANGLE: The packet will be mangled and accepted
+# - DROP: All the following packets in this stream will be dropped
+
+# If you want, you can use print to debug your filters, but this could slow down the filter
+
+# Filter names must be unique and are taken from the name of the function wrapped by the decorator
+@pyfilter
+# This function will handle only a RawPacket object, the lowest level of packet abstraction
+def strange_filter(packet:RawPacket):
+    # Mangling packets can be dangerous: the mangling done by the filter below can destabilize the internal TCP state
+    # Also, it is not guaranteed that l4_data is the same as the packet data:
+    # packet.data is the assembled TCP stream, l4_data is the TCP payload of the packet in the nfqueue
+    # Out-of-order TCP packets are accepted by default, and Python is not called in that case
+    # For this reason mangling is only available on RawPacket: higher level data abstractions are read-only
+    if b"TEST_MANGLING" in packet.l4_data:
+        # It's possible to change the raw_packet and l4_data values to mangle the packet; data is immutable instead
+        packet.l4_data = packet.l4_data.replace(b"TEST", b"UNSTABLE")
+        return UNSTABLE_MANGLE
+    # Drops the traffic
+    if b"BAD DATA 1" in packet.data:
+        return DROP
+    # Rejects the traffic
+    if b"BAD DATA 2" in packet.data:
+        return REJECT
+    # Accepts the traffic (default if None is returned)
+    return ACCEPT
+
+# Example with a higher level of abstraction
+@pyfilter
+def http_filter(http:HTTPRequest):
+    if http.method == "GET" and "test" in http.url:
+        return REJECT
+
+# ADVANCED OPTIONS
+# You can specify some additional options on the stream management:
+# pyproxy will automatically store all the packets (already ordered by the c++ binary).
+# If the stream grows too big, you can specify what action to take
+# by defining some variables in the global context:
+# - FGEX_STREAM_MAX_SIZE: The maximum size of the stream in bytes (default 1MB)
+#   NOTE: the stream size is calculated as the sum of the sizes of the packets in the stream (both directions)
+# - FGEX_FULL_STREAM_ACTION: The action to take when the stream is full
+#   - FLUSH: Flush the stream and continue to acquire new packets (default)
+#   - DROP: Drop the next stream packets - like a DROP action returned by a filter
+#   - REJECT: Reject the stream and close the connection - like a REJECT action returned by a filter
+#   - ACCEPT: Stop calling pyfilters and accept the traffic
+
+# Example of a global context configuration
+FGEX_STREAM_MAX_SIZE = 4096
+FGEX_FULL_STREAM_ACTION = REJECT
+# This could be an ideal configuration if we expect streams to normally carry at most 4KB of traffic
+`
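A filter written with this template can also be smoke-tested offline before uploading it — a sketch, assuming the filter is saved as `my_filter.py` (hypothetical name), that the library's `RawPacket` keeps the constructor shown in the module removed above, and that `@pyfilter` leaves the function directly callable:

```python
# Offline smoke test for the example filter; my_filter.py is a hypothetical
# file containing the EXAMPLE_PYFILTER code shown above.
from firegex.nfproxy.params import RawPacket
from my_filter import strange_filter

payload = b"BAD DATA 1"
pkt = RawPacket(
    data=payload,                       # assembled stream == single payload here
    raw_packet=b"\x00" * 40 + payload,  # placeholder 40-byte L3/L4 headers
    is_input=True,
    is_ipv6=False,
    is_tcp=True,
    l4_size=len(payload),
)
print(strange_filter(pkt))  # expected verdict: DROP
```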
@@ -37,9 +37,14 @@ export default function NavBar() {
 <Divider my="xs" />
 <Box style={{flexGrow: 1}} component={ScrollArea} px="xs" mt="xs">
     <NavBarButton navigate="nfregex" closeNav={closeNav} name="Netfilter Regex" color="grape" icon={<BsRegex size={19} />} />
-    <NavBarButton navigate="nfproxy" closeNav={closeNav} name="Netfilter Proxy" color="lime" icon={<TbPlugConnected size={19} />} />
     <NavBarButton navigate="firewall" closeNav={closeNav} name="Firewall Rules" color="red" icon={<PiWallLight size={19} />} />
     <NavBarButton navigate="porthijack" closeNav={closeNav} name="Hijack Port to Proxy" color="blue" icon={<GrDirections size={19} />} />
+    <Box px="xs" mt="lg">
+        <Title order={5}>Experimental Features 🧪</Title>
+    </Box>
+    <Text></Text>
+    <Divider my="xs" />
+    <NavBarButton navigate="nfproxy" closeNav={closeNav} name="Netfilter Proxy" color="lime" icon={<TbPlugConnected size={19} />} />
 </Box>

 </AppShell.Navbar>
@@ -3,7 +3,7 @@ import { Navigate, useNavigate, useParams } from 'react-router-dom';
 import { Badge, Divider, Menu } from '@mantine/core';
 import { useEffect, useState } from 'react';
 import { FaFilter, FaPencilAlt, FaPlay, FaStop } from 'react-icons/fa';
-import { nfproxy, nfproxyServiceFilterCodeQuery, nfproxyServicePyfiltersQuery, nfproxyServiceQuery, serviceQueryKey } from '../../components/NFProxy/utils';
+import { EXAMPLE_PYFILTER, nfproxy, nfproxyServiceFilterCodeQuery, nfproxyServicePyfiltersQuery, nfproxyServiceQuery, serviceQueryKey } from '../../components/NFProxy/utils';
 import { MdDoubleArrow } from "react-icons/md"
 import YesNoModal from '../../components/YesNoModal';
 import { errorNotify, isMediumScreen, okNotify, regex_ipv4, socketio } from '../../js/utils';
@@ -45,17 +45,20 @@ export default function ServiceDetailsNFProxy() {
     useEffect(()=>{
         if (srv){
             if (openLogModal){
+                logDataSetters.setState([])
                 socketio.emit("nfproxy-outstream-join", { service: srv });
                 socketio.on(`nfproxy-outstream-${srv}`, (data) => {
                     logDataSetters.append(data)
                 });
             }else{
-                logDataSetters.setState([])
                 socketio.emit("nfproxy-outstream-leave", { service: srv });
+                socketio.off(`nfproxy-outstream-${srv}`);
+                logDataSetters.setState([])
             }
             return () => {
-                logDataSetters.setState([])
                 socketio.emit("nfproxy-outstream-leave", { service: srv });
+                socketio.off(`nfproxy-outstream-${srv}`);
+                logDataSetters.setState([])
             }
         }
     }, [openLogModal, srv])
@@ -199,11 +202,13 @@ export default function ServiceDetailsNFProxy() {

 {(!filtersList.data || filtersList.data.length == 0)?<>
     <Space h="xl" />
-    <Title className='center-flex' style={{textAlign:"center"}} order={3}>No filters found! Edit the proxy file</Title>
+    <Title className='center-flex' style={{textAlign:"center"}} order={3}>No filters found! Edit the proxy file, install the firegex client:<Space w="xs" /><Code mb={-4} >pip install fgex</Code></Title>
     <Space h="xs" />
-    <Title className='center-flex' style={{textAlign:"center"}} order={3}>Install the firegex client:<Space w="xs" /><Code mb={-4} >pip install fgex</Code></Title>
+    <Title className='center-flex' style={{textAlign:"center"}} order={3}>Then create a new filter file with the following syntax and upload it here (using the button above)</Title>
     <Space h="xs" />
-    <Title className='center-flex' style={{textAlign:"center"}} order={3}>Then run the command:<Space w="xs" /><Code mb={-4} >fgex nfproxy</Code></Title>
+    <Title className='center-flex' style={{textAlign:"center"}} order={3}>Before uploading the filter you can test it using the fgex command installed by the Python lib</Title>
+    <Space h="lg" />
+    <CodeHighlight code={EXAMPLE_PYFILTER} language="python" />
 </>:<>{filtersList.data?.map( (filterInfo) => <PyFilterView filterInfo={filterInfo} key={filterInfo.name}/>)}</>
 }
 <YesNoModal
@@ -1,4 +1,4 @@
-import { ActionIcon, Badge, Box, FileButton, LoadingOverlay, Space, ThemeIcon, Title, Tooltip } from '@mantine/core';
+import { ActionIcon, Badge, Box, Code, LoadingOverlay, Space, ThemeIcon, Title, Tooltip } from '@mantine/core';
 import { useEffect, useState } from 'react';
 import { BsPlusLg } from "react-icons/bs";
 import { useNavigate, useParams } from 'react-router-dom';
@@ -7,11 +7,12 @@ import { errorNotify, getErrorMessage, isMediumScreen } from '../../js/utils';
 import AddEditService from '../../components/NFProxy/AddEditService';
 import { useQueryClient } from '@tanstack/react-query';
 import { TbPlugConnected, TbReload } from 'react-icons/tb';
-import { nfproxy, nfproxyServiceQuery } from '../../components/NFProxy/utils';
+import { EXAMPLE_PYFILTER, nfproxy, nfproxyServiceQuery } from '../../components/NFProxy/utils';
 import { FaFilter, FaPencilAlt, FaServer } from 'react-icons/fa';
 import { MdUploadFile } from "react-icons/md";
 import { notifications } from '@mantine/notifications';
 import { useFileDialog } from '@mantine/hooks';
+import { CodeHighlight } from '@mantine/code-highlight';

 export default function NFProxy({ children }: { children: any }) {
@@ -138,13 +139,27 @@ export default function NFProxy({ children }: { children: any }) {
 <LoadingOverlay visible={services.isLoading} />
 {(services.data && services.data?.length > 0)?services.data.map( srv => <ServiceRow service={srv} key={srv.service_id} onClick={()=>{
     navigator("/nfproxy/"+srv.service_id)
-}} />):<><Space h="xl"/> <Title className='center-flex' style={{textAlign:"center"}} order={3}>No services found! Add one clicking the "+" buttons</Title>
-<Box className='center-flex'>
+}} />):<>
+<Box className='center-flex-row'>
+    <Space h="xl" />
+    <Title className='center-flex' style={{textAlign:"center"}} order={3}>Netfilter proxy is a simulated proxy written in Python with a C++ core</Title>
+    <Space h="xs" />
+    <Title className='center-flex' style={{textAlign:"center"}} order={5}>Filters are created using a simple Python syntax; in fact the first thing you need to do is to install the firegex lib:<Space w="xs" /><Code mb={-4} >pip install firegex</Code></Title>
+    <Space h="xs" />
+    <Title className='center-flex' style={{textAlign:"center"}} order={5}>Then you can create a new service and write custom filters for the service</Title>
+    <Space h="xs" />
+    <Title className='center-flex' style={{textAlign:"center"}} order={5}>Below there is a description and an example of how a pyfilter has to be composed (this example is replicated in every empty service)</Title>
+    <Space h="lg" />
+    <CodeHighlight code={EXAMPLE_PYFILTER} language="python" />
+    <Space h="lg" />
+    <Title className='center-flex' style={{textAlign:"center"}} order={3}>Add your first service</Title>
+    <Space h="lg" />
     <Tooltip label="Add a new service" color="blue">
         <ActionIcon color="blue" onClick={()=>setOpen(true)} size="xl" radius="md" variant="filled">
             <BsPlusLg size="20px" />
         </ActionIcon>
     </Tooltip>

 </Box>
 </>}
 </>}