data handler improvements, tests written for nfproxy, new option on parsing failure

This commit is contained in:
Domingo Dirutigliano
2025-03-09 22:14:34 +01:00
parent 9dfe229a26
commit 73c40d2f5d
21 changed files with 1077 additions and 306 deletions

View File

@@ -191,6 +191,12 @@ class PktRequest {
size_t total_size;
if (is_ipv6){
delete ipv6;
ipv6 = nullptr;
if (data_size >= 40){ // 40 == fixed size of ipv6 header
// Resetting payload length before parsing to libtins
uint16_t payload_len = htons(data_size-40);
memcpy(((uint8_t *)data)+4, &payload_len, 2);
}
ipv6 = new Tins::IPv6((uint8_t*)data, data_size);
if (tcp){
tcp = ipv6->find_pdu<Tins::TCP>();
@@ -204,6 +210,7 @@ class PktRequest {
total_size = ipv6->size();
}else{
delete ipv4;
ipv4 = nullptr;
ipv4 = new Tins::IP((uint8_t*)data, data_size);
if (tcp){
tcp = ipv4->find_pdu<Tins::TCP>();
@@ -311,7 +318,10 @@ class PktRequest {
set_packet(raw_pkt, raw_pkt_size);
reserialize();
action = FilterAction::MANGLE;
}catch(...){
}catch(const std::exception& e){
#ifdef DEBUG
cerr << "[DEBUG] [PktRequest.mangle_custom_pkt] " << e.what() << endl;
#endif
action = FilterAction::DROP;
}
perfrom_action(false);
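The IPv6 branch above rewrites the Payload Length field (bytes 4-5 of the fixed 40-byte header) with htons(data_size - 40) before handing the buffer to libtins, so a previously mangled packet parses cleanly. A minimal Python sketch of the same patch, with a hypothetical helper name and assuming the buffer starts at the IPv6 header:

import struct

def reset_ipv6_payload_length(data: bytes) -> bytes:
    # Illustrative helper only: patch the 16-bit Payload Length field at
    # offset 4 of the fixed 40-byte IPv6 header so it matches the buffer,
    # mirroring the memcpy of htons(data_size - 40) done in PktRequest.
    if len(data) < 40:
        return data  # no full IPv6 header, nothing to patch
    patched = bytearray(data)
    struct.pack_into("!H", patched, 4, len(data) - 40)  # big-endian, like htons
    return bytes(patched)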

View File

@@ -148,7 +148,7 @@ class PyProxyQueue: public NfQueue::ThreadNfQueue<PyProxyQueue> {
case PyFilterResponse::MANGLE:
pkt->mangle_custom_pkt(result.mangled_packet->c_str(), result.mangled_packet->size());
if (pkt->get_action() == NfQueue::FilterAction::DROP){
cerr << "[error] [filter_action] Failed to mangle: the packet sent is not serializzable... the packet was dropped" << endl;
cerr << "[ERROR] [filter_action] Failed to mangle: Malformed Packet... the packet was dropped" << endl;
print_blocked_reason(*result.filter_match_by);
print_exception_reason();
}else{

View File

@@ -38,6 +38,7 @@ class SettingsForm(BaseModel):
class PyFilterModel(BaseModel):
name: str
service_id: str
blocked_packets: int
edited_packets: int
active: bool
@@ -55,7 +56,6 @@ class ServiceAddResponse(BaseModel):
class SetPyFilterForm(BaseModel):
code: str
sid: str|None = None
app = APIRouter()
@@ -71,12 +71,13 @@ db = SQLite('db/nft-pyfilters.db', {
'fail_open': 'BOOLEAN NOT NULL CHECK (fail_open IN (0, 1)) DEFAULT 1',
},
'pyfilter': {
'name': 'VARCHAR(100) PRIMARY KEY',
'name': 'VARCHAR(100) NOT NULL',
'service_id': 'VARCHAR(100) NOT NULL',
'blocked_packets': 'INTEGER UNSIGNED NOT NULL DEFAULT 0',
'edited_packets': 'INTEGER UNSIGNED NOT NULL DEFAULT 0',
'service_id': 'VARCHAR(100) NOT NULL',
'active' : 'BOOLEAN NOT NULL CHECK (active IN (0, 1)) DEFAULT 1',
'FOREIGN KEY (service_id)':'REFERENCES services (service_id)',
'PRIMARY KEY': '(name, service_id)'
},
'QUERY':[
"CREATE UNIQUE INDEX IF NOT EXISTS unique_services ON services (port, ip_int, l4_proto);",
@@ -256,38 +257,38 @@ async def get_service_pyfilter_list(service_id: str):
raise HTTPException(status_code=400, detail="This service does not exists!")
return db.query("""
SELECT
name, blocked_packets, edited_packets, active
name, blocked_packets, edited_packets, active, service_id
FROM pyfilter WHERE service_id = ?;
""", service_id)
@app.get('/pyfilters/{filter_name}', response_model=PyFilterModel)
async def get_pyfilter_by_id(filter_name: str):
@app.get('/services/{service_id}/pyfilters/{filter_name}', response_model=PyFilterModel)
async def get_pyfilter_by_id(service_id: str, filter_name: str):
"""Get pyfilter info using his id"""
res = db.query("""
SELECT
name, blocked_packets, edited_packets, active
FROM pyfilter WHERE name = ?;
""", filter_name)
name, blocked_packets, edited_packets, active, service_id
FROM pyfilter WHERE name = ? AND service_id = ?;
""", filter_name, service_id)
if len(res) == 0:
raise HTTPException(status_code=400, detail="This filter does not exists!")
return res[0]
@app.post('/pyfilters/{filter_name}/enable', response_model=StatusMessageModel)
async def pyfilter_enable(filter_name: str):
@app.post('/services/{service_id}/pyfilters/{filter_name}/enable', response_model=StatusMessageModel)
async def pyfilter_enable(service_id: str, filter_name: str):
"""Request the enabling of a pyfilter"""
res = db.query('SELECT * FROM pyfilter WHERE name = ?;', filter_name)
res = db.query('SELECT * FROM pyfilter WHERE name = ? AND service_id = ?;', filter_name, service_id)
if len(res) != 0:
db.query('UPDATE pyfilter SET active=1 WHERE name = ?;', filter_name)
db.query('UPDATE pyfilter SET active=1 WHERE name = ? AND service_id = ?;', filter_name, service_id)
await firewall.get(res[0]["service_id"]).update_filters()
await refresh_frontend()
return {'status': 'ok'}
@app.post('/pyfilters/{filter_name}/disable', response_model=StatusMessageModel)
async def pyfilter_disable(filter_name: str):
@app.post('/services/{service_id}/pyfilters/{filter_name}/disable', response_model=StatusMessageModel)
async def pyfilter_disable(service_id: str, filter_name: str):
"""Request the deactivation of a pyfilter"""
res = db.query('SELECT * FROM pyfilter WHERE name = ?;', filter_name)
res = db.query('SELECT * FROM pyfilter WHERE name = ? AND service_id = ?;', filter_name, service_id)
if len(res) != 0:
db.query('UPDATE pyfilter SET active=0 WHERE name = ?;', filter_name)
db.query('UPDATE pyfilter SET active=0 WHERE name = ? AND service_id = ?;', filter_name, service_id)
await firewall.get(res[0]["service_id"]).update_filters()
await refresh_frontend()
return {'status': 'ok'}
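With this change, pyfilter enable/disable is scoped per service. A hedged client sketch of the new route shape using Python requests; the host, any API prefix, and session authentication are assumptions, only the services/{service_id}/pyfilters/{filter_name}/enable path comes from this commit:

import requests

def enable_pyfilter(session: requests.Session, base_url: str, service_id: str, filter_name: str) -> bool:
    # base_url is an assumption, e.g. "http://127.0.0.1:4444" plus whatever
    # prefix the deployment mounts the nfproxy router under.
    r = session.post(f"{base_url}/nfproxy/services/{service_id}/pyfilters/{filter_name}/enable")
    return r.json().get("status") == "ok"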
@@ -312,8 +313,8 @@ async def add_new_service(form: ServiceAddForm):
await refresh_frontend()
return {'status': 'ok', 'service_id': srv_id}
@app.put('/services/{service_id}/pyfilters/code', response_model=StatusMessageModel)
async def set_pyfilters(service_id: str, form: SetPyFilterForm):
@app.put('/services/{service_id}/code', response_model=StatusMessageModel)
async def set_pyfilters_code(service_id: str, form: SetPyFilterForm):
"""Set the python filter for a service"""
service = db.query("SELECT service_id, proto FROM services WHERE service_id = ?;", service_id)
if len(service) == 0:
@@ -356,10 +357,10 @@ async def set_pyfilters(service_id: str, form: SetPyFilterForm):
return {'status': 'ok'}
@app.get('/services/{service_id}/pyfilters/code', response_class=PlainTextResponse)
async def get_pyfilters(service_id: str):
@app.get('/services/{service_id}/code', response_class=PlainTextResponse)
async def get_pyfilters_code(service_id: str):
"""Get the python filter for a service"""
if not db.query("SELECT 1 FROM services s WHERE s.service_id = ?;", service_id):
if not db.query("SELECT 1 FROM services WHERE service_id = ?;", service_id):
raise HTTPException(status_code=400, detail="This service does not exists!")
try:
with open(f"db/nfproxy_filters/{service_id}.py") as f:

View File

@@ -94,7 +94,6 @@ This handler will be called twice: one for the request headers and one for the r
- headers: dict - The headers of the request
- user_agent: str - The user agent of the request
- content_encoding: str - The content encoding of the request
- has_begun: bool - It's true if the request has begun
- body: bytes - The body of the request
- headers_complete: bool - It's true if the headers are complete
- message_complete: bool - It's true if the message is complete
@@ -122,7 +121,6 @@ This handler will be called twice: one for the response headers and one for the
- headers: dict - The headers of the response
- user_agent: str - The user agent of the response
- content_encoding: str - The content encoding of the response
- has_begun: bool - It's true if the response has begun
- body: bytes - The body of the response
- headers_complete: bool - It's true if the headers are complete
- message_complete: bool - It's true if the message is complete
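For reference, a pyfilter consuming these attributes might look like the following sketch; the handler name and the matched strings are hypothetical, while the imports and the convention that returning None accepts the message come from elsewhere in this commit:

from firegex.nfproxy import pyfilter, REJECT
from firegex.nfproxy.models import HttpRequest

@pyfilter
def block_bad_requests(req: HttpRequest):
    # headers_complete is true on the first call (headers only),
    # message_complete on the second call (body available).
    if "sqlmap" in req.user_agent.lower():
        return REJECT
    if req.message_complete and req.body and b"' OR 1=1" in req.body:
        return REJECT
    # returning nothing (None) accepts the message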

View File

@@ -76,11 +76,8 @@ def handle_packet(glob: dict) -> None:
cache_call = {} # Cache of the data handler calls
cache_call[RawPacket] = internal_data.current_pkt
final_result = Action.ACCEPT
result = PacketHandlerResult(glob)
func_name = None
mangled_packet = None
for filter in internal_data.filter_call_info:
final_params = []
skip_call = False
@@ -116,24 +113,37 @@ def handle_packet(glob: dict) -> None:
if skip_call:
continue
res = context_call(glob, filter.func, *final_params)
# Create an iterator with all the calls to be done
def try_to_call(params:list):
is_base_call = True
for i in range(len(params)):
if isinstance(params[i], list):
new_params = params.copy()
for ele in params[i]:
new_params[i] = ele
for ele in try_to_call(new_params):
yield ele
is_base_call = False
break
if is_base_call:
yield context_call(glob, filter.func, *params)
if res is None:
continue #ACCEPTED
if not isinstance(res, Action):
raise Exception(f"Invalid return type {type(res)} for function {filter.name}")
if res == Action.MANGLE:
mangled_packet = internal_data.current_pkt.raw_packet
if res != Action.ACCEPT:
func_name = filter.name
final_result = res
break
for res in try_to_call(final_params):
if res is None:
continue #ACCEPTED
if not isinstance(res, Action):
raise Exception(f"Invalid return type {type(res)} for function {filter.name}")
if res == Action.MANGLE:
result.matched_by = filter.name
result.mangled_packet = internal_data.current_pkt.raw_packet
result.action = Action.MANGLE
elif res != Action.ACCEPT:
result.matched_by = filter.name
result.action = res
result.mangled_packet = None
return result.set_result()
result.action = final_result
result.matched_by = func_name
result.mangled_packet = mangled_packet
return result.set_result()
return result.set_result() # Will be MANGLE or ACCEPT
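The try_to_call generator above expands any list-valued parameter, so when a data handler yields several completed messages for a single packet the pyfilter runs once per message. A standalone sketch of that expansion, with context_call replaced by a direct call purely for illustration:

def expand_calls(func, params: list):
    # Yield one invocation of func per combination obtained by expanding
    # list-valued parameters, mirroring try_to_call above.
    for i, p in enumerate(params):
        if isinstance(p, list):
            for ele in p:
                new_params = params.copy()
                new_params[i] = ele
                yield from expand_calls(func, new_params)
            return  # this position was expanded; the recursion handled the rest
    yield func(*params)

# A handler produced two complete HTTP messages for one packet:
list(expand_calls(lambda msg: f"checked {msg}", [["msg-1", "msg-2"]]))
# -> ['checked msg-1', 'checked msg-2']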
def compile(glob:dict) -> None:
@@ -148,13 +158,12 @@ def compile(glob:dict) -> None:
if "FGEX_STREAM_MAX_SIZE" in glob and int(glob["FGEX_STREAM_MAX_SIZE"]) > 0:
internal_data.stream_max_size = int(glob["FGEX_STREAM_MAX_SIZE"])
else:
internal_data.stream_max_size = 1*8e20 # 1MB default value
if "FGEX_FULL_STREAM_ACTION" in glob and isinstance(glob["FGEX_FULL_STREAM_ACTION"], FullStreamAction):
internal_data.full_stream_action = glob["FGEX_FULL_STREAM_ACTION"]
else:
internal_data.full_stream_action = FullStreamAction.FLUSH
if "FGEX_INVALID_ENCODING_ACTION" in glob and isinstance(glob["FGEX_INVALID_ENCODING_ACTION"], Action):
internal_data.invalid_encoding_action = glob["FGEX_INVALID_ENCODING_ACTION"]
PacketHandlerResult(glob).reset_result()
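compile() reads these tuning knobs from the filter module's globals. A sketch of a filter file setting them; the values are illustrative, not defaults, and the combined import is an assumption based on the docs snippets added to the frontend below:

from firegex.nfproxy import FullStreamAction, ExceptionAction

FGEX_STREAM_MAX_SIZE = 2 * 1024 * 1024                  # cap buffered stream data at 2 MiB
FGEX_FULL_STREAM_ACTION = FullStreamAction.FLUSH        # flush buffers when the cap is reached
FGEX_INVALID_ENCODING_ACTION = ExceptionAction.REJECT   # reject the connection if the parser crashes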

View File

@@ -1,5 +1,5 @@
from firegex.nfproxy.internals.models import FilterHandler
from firegex.nfproxy.internals.models import FullStreamAction
from firegex.nfproxy.internals.models import FullStreamAction, ExceptionAction
class RawPacket:
"class rapresentation of the nfqueue packet sent in python context by the c++ core"
@@ -120,23 +120,39 @@ class DataStreamCtx:
@property
def stream_max_size(self) -> int:
if "stream_max_size" not in self.__data.keys():
self.__data["stream_max_size"] = 1*8e20
self.__data["stream_max_size"] = 1*8e20 # 1MB default value
return self.__data.get("stream_max_size")
@stream_max_size.setter
def stream_max_size(self, v: int):
if not isinstance(v, int):
raise Exception("Invalid data type, data MUST be of type int")
self.__data["stream_max_size"] = v
@property
def full_stream_action(self) -> FullStreamAction:
if "full_stream_action" not in self.__data.keys():
self.__data["full_stream_action"] = "flush"
self.__data["full_stream_action"] = FullStreamAction.FLUSH
return self.__data.get("full_stream_action")
@full_stream_action.setter
def full_stream_action(self, v: FullStreamAction):
if not isinstance(v, FullStreamAction):
raise Exception("Invalid data type, data MUST be of type FullStreamAction")
self.__data["full_stream_action"] = v
@property
def invalid_encoding_action(self) -> ExceptionAction:
if "invalid_encoding_action" not in self.__data.keys():
self.__data["invalid_encoding_action"] = ExceptionAction.REJECT
return self.__data.get("invalid_encoding_action")
@invalid_encoding_action.setter
def invalid_encoding_action(self, v: ExceptionAction):
if not isinstance(v, ExceptionAction):
raise Exception("Invalid data type, data MUST be of type ExceptionAction")
self.__data["invalid_encoding_action"] = v
@property
def data_handler_context(self) -> dict:
if "data_handler_context" not in self.__data.keys():

View File

@@ -13,3 +13,4 @@ class RejectConnection(Exception):
class StreamFullReject(Exception):
"raise this exception if you want to reject the connection due to full stream"

View File

@@ -8,6 +8,13 @@ class Action(Enum):
REJECT = 2
MANGLE = 3
class ExceptionAction(Enum):
"""Action to be taken by the filter when an exception occurs (used in some cases)"""
ACCEPT = 0
DROP = 1
REJECT = 2
NOACTION = 3
class FullStreamAction(Enum):
"""Action to be taken by the filter when the stream is full"""
FLUSH = 0
@@ -40,5 +47,3 @@ class PacketHandlerResult:
def reset_result(self) -> None:
self.glob["__firegex_pyfilter_result"] = None

View File

@@ -1,101 +1,143 @@
import pyllhttp
from firegex.nfproxy.internals.exceptions import NotReadyToRun
from firegex.nfproxy.internals.data import DataStreamCtx
from firegex.nfproxy.internals.exceptions import StreamFullDrop, StreamFullReject
from firegex.nfproxy.internals.models import FullStreamAction
from firegex.nfproxy.internals.exceptions import StreamFullDrop, StreamFullReject, RejectConnection, DropPacket
from firegex.nfproxy.internals.models import FullStreamAction, ExceptionAction
from dataclasses import dataclass, field
from collections import deque
from typing import Type
@dataclass
class InternalHTTPMessage:
"""Internal class to handle HTTP messages"""
url: str|None = field(default=None)
headers: dict[str, str] = field(default_factory=dict)
lheaders: dict[str, str] = field(default_factory=dict) # lowercase copy of the headers
body: bytes|None = field(default=None)
headers_complete: bool = field(default=False)
message_complete: bool = field(default=False)
status: str|None = field(default=None)
total_size: int = field(default=0)
user_agent: str = field(default_factory=str)
content_encoding: str = field(default_factory=str)
content_type: str = field(default_factory=str)
keep_alive: bool = field(default=False)
should_upgrade: bool = field(default=False)
http_version: str = field(default_factory=str)
method: str = field(default_factory=str)
content_length: int = field(default=0)
stream: bytes = field(default_factory=bytes)
@dataclass
class InternalHttpBuffer:
"""Internal class to handle HTTP messages"""
_url_buffer: bytes = field(default_factory=bytes)
_header_fields: dict[bytes, bytes] = field(default_factory=dict)
_body_buffer: bytes = field(default_factory=bytes)
_status_buffer: bytes = field(default_factory=bytes)
_current_header_field: bytes = field(default_factory=bytes)
_current_header_value: bytes = field(default_factory=bytes)
class InternalCallbackHandler():
url: str|None = None
_url_buffer: bytes = b""
headers: dict[str, str] = {}
lheaders: dict[str, str] = {} # Lowercase headers
_header_fields: dict[bytes, bytes] = {}
has_begun: bool = False
body: bytes = None
_body_buffer: bytes = b""
headers_complete: bool = False
message_complete: bool = False
status: str|None = None
_status_buffer: bytes = b""
_current_header_field = b""
_current_header_value = b""
_save_body = True
total_size = 0
buffers = InternalHttpBuffer()
msg = InternalHTTPMessage()
save_body = True
raised_error = False
has_begun = False
messages: deque[InternalHTTPMessage] = deque()
def reset_data(self):
self.msg = InternalHTTPMessage()
self.buffers = InternalHttpBuffer()
self.messages.clear()
def on_message_begin(self):
self.buffers = InternalHttpBuffer()
self.msg = InternalHTTPMessage()
self.has_begun = True
def on_url(self, url):
self.total_size += len(url)
self._url_buffer += url
self.buffers._url_buffer += url
self.msg.total_size += len(url)
def on_url_complete(self):
self.url = self._url_buffer.decode(errors="ignore")
self._url_buffer = None
self.msg.url = self.buffers._url_buffer.decode(errors="ignore")
self.buffers._url_buffer = b""
def on_status(self, status: bytes):
self.msg.total_size += len(status)
self.buffers._status_buffer += status
def on_status_complete(self):
self.msg.status = self.buffers._status_buffer.decode(errors="ignore")
self.buffers._status_buffer = b""
def on_header_field(self, field):
self.total_size += len(field)
self._current_header_field += field
self.msg.total_size += len(field)
self.buffers._current_header_field += field
def on_header_field_complete(self):
self._current_header_field = self._current_header_field
pass # Nothing to do
def on_header_value(self, value):
self.total_size += len(value)
self._current_header_value += value
self.msg.total_size += len(value)
self.buffers._current_header_value += value
def on_header_value_complete(self):
if self._current_header_value is not None and self._current_header_field is not None:
self._header_fields[self._current_header_field.decode(errors="ignore")] = self._current_header_value.decode(errors="ignore")
self._current_header_field = b""
self._current_header_value = b""
if self.buffers._current_header_field:
self.buffers._header_fields[self.buffers._current_header_field.decode(errors="ignore")] = self.buffers._current_header_value.decode(errors="ignore")
self.buffers._current_header_field = b""
self.buffers._current_header_value = b""
def on_headers_complete(self):
self.headers_complete = True
self.headers = self._header_fields
self.lheaders = {k.lower(): v for k, v in self._header_fields.items()}
self._header_fields = {}
self._current_header_field = b""
self._current_header_value = b""
self.msg.headers = self.buffers._header_fields
self.msg.lheaders = {k.lower(): v for k, v in self.buffers._header_fields.items()}
self.buffers._header_fields = {}
self.buffers._current_header_field = b""
self.buffers._current_header_value = b""
self.msg.headers_complete = True
self.msg.method = self.method_parsed
self.msg.content_length = self.content_length_parsed
self.msg.should_upgrade = self.should_upgrade
self.msg.keep_alive = self.keep_alive
self.msg.http_version = self.http_version
self.msg.content_type = self.content_type
self.msg.content_encoding = self.content_encoding
self.msg.user_agent = self.user_agent
def on_body(self, body: bytes):
if self._save_body:
self.total_size += len(body)
self._body_buffer += body
if self.save_body:
self.msg.total_size += len(body)
self.buffers._body_buffer += body
def on_message_complete(self):
self.body = self._body_buffer
self._body_buffer = b""
self.msg.body = self.buffers._body_buffer
self.buffers._body_buffer = b""
try:
if "gzip" in self.content_encoding.lower():
import gzip
import io
with gzip.GzipFile(fileobj=io.BytesIO(self.body)) as f:
self.body = f.read()
with gzip.GzipFile(fileobj=io.BytesIO(self.msg.body)) as f:
self.msg.body = f.read()
except Exception as e:
print(f"Error decompressing gzip: {e}: skipping", flush=True)
self.message_complete = True
def on_status(self, status: bytes):
self.total_size += len(status)
self._status_buffer += status
def on_status_complete(self):
self.status = self._status_buffer.decode(errors="ignore")
self._status_buffer = b""
self.msg.message_complete = True
self.has_begun = False
if not self._packet_to_stream():
self.messages.append(self.msg)
@property
def user_agent(self) -> str:
return self.lheaders.get("user-agent", "")
return self.msg.lheaders.get("user-agent", "")
@property
def content_encoding(self) -> str:
return self.lheaders.get("content-encoding", "")
return self.msg.lheaders.get("content-encoding", "")
@property
def content_type(self) -> str:
return self.lheaders.get("content-type", "")
return self.msg.lheaders.get("content-type", "")
@property
def keep_alive(self) -> bool:
@@ -107,16 +149,49 @@ class InternalCallbackHandler():
@property
def http_version(self) -> str:
return f"{self.major}.{self.minor}"
if self.major and self.minor:
return f"{self.major}.{self.minor}"
else:
return ""
@property
def method_parsed(self) -> str:
return self.method.decode(errors="ignore")
return self.method
@property
def total_size(self) -> int:
"""Total size used by the parser"""
tot = self.msg.total_size
for msg in self.messages:
tot += msg.total_size
return tot
@property
def content_length_parsed(self) -> int:
return self.content_length
def _packet_to_stream(self):
return self.should_upgrade and self.save_body
def parse_data(self, data: bytes):
if self._packet_to_stream(): # This is a websocket upgrade!
self.msg.message_complete = True # The message is complete but has become a stream, so the handler needs to be called every time a new packet is received
self.msg.total_size += len(data)
self.msg.stream += data #buffering stream
else:
try:
self.execute(data)
except Exception as e:
self.raised_error = True
print(f"Error parsing HTTP packet: {e} with data {data}", flush=True)
raise e
def pop_message(self):
return self.messages.popleft()
def __repr__(self):
return f"<InternalCallbackHandler msg={self.msg} buffers={self.buffers} save_body={self.save_body} raised_error={self.raised_error} has_begun={self.has_begun} messages={self.messages}>"
class InternalHttpRequest(InternalCallbackHandler, pyllhttp.Request):
def __init__(self):
@@ -131,11 +206,15 @@ class InternalHttpResponse(InternalCallbackHandler, pyllhttp.Response):
class InternalBasicHttpMetaClass:
"""Internal class to handle HTTP requests and responses"""
def __init__(self):
self._parser: InternalHttpRequest|InternalHttpResponse
self._headers_were_set = False
def __init__(self, parser: InternalHttpRequest|InternalHttpResponse, msg: InternalHTTPMessage):
self._parser = parser
self.stream = b""
self.raised_error = False
self._message: InternalHTTPMessage|None = msg
self._contructor_hook()
def _contructor_hook(self):
pass
@property
def total_size(self) -> int:
@@ -145,116 +224,98 @@ class InternalBasicHttpMetaClass:
@property
def url(self) -> str|None:
"""URL of the message"""
return self._parser.url
return self._message.url
@property
def headers(self) -> dict[str, str]:
"""Headers of the message"""
return self._parser.headers
return self._message.headers
@property
def user_agent(self) -> str:
"""User agent of the message"""
return self._parser.user_agent
return self._message.user_agent
@property
def content_encoding(self) -> str:
"""Content encoding of the message"""
return self._parser.content_encoding
@property
def has_begun(self) -> bool:
"""If the message has begun"""
return self._parser.has_begun
return self._message.content_encoding
@property
def body(self) -> bytes:
"""Body of the message"""
return self._parser.body
return self._message.body
@property
def headers_complete(self) -> bool:
"""If the headers are complete"""
return self._parser.headers_complete
return self._message.headers_complete
@property
def message_complete(self) -> bool:
"""If the message is complete"""
return self._parser.message_complete
return self._message.message_complete
@property
def http_version(self) -> str:
"""HTTP version of the message"""
return self._parser.http_version
return self._message.http_version
@property
def keep_alive(self) -> bool:
"""If the message should keep alive"""
return self._parser.keep_alive
return self._message.keep_alive
@property
def should_upgrade(self) -> bool:
"""If the message should upgrade"""
return self._parser.should_upgrade
return self._message.should_upgrade
@property
def content_length(self) -> int|None:
"""Content length of the message"""
return self._parser.content_length_parsed
return self._message.content_length
def get_header(self, header: str, default=None) -> str:
"""Get a header from the message without caring about the case"""
return self._parser.lheaders.get(header.lower(), default)
return self._message.lheaders.get(header.lower(), default)
def _packet_to_stream(self, internal_data: DataStreamCtx):
return self.should_upgrade and self._parser._save_body
@staticmethod
def _associated_parser_class() -> Type[InternalHttpRequest]|Type[InternalHttpResponse]:
raise NotImplementedError()
def _fetch_current_packet(self, internal_data: DataStreamCtx):
if self._packet_to_stream(internal_data): # This is a websocket upgrade!
self._parser.total_size += len(internal_data.current_pkt.data)
self.stream += internal_data.current_pkt.data
else:
try:
self._parser.execute(internal_data.current_pkt.data)
if not self._parser.message_complete and self._parser.headers_complete and len(self._parser._body_buffer) == self._parser.content_length_parsed:
self._parser.on_message_complete()
except Exception as e:
self.raised_error = True
print(f"Error parsing HTTP packet: {e} {internal_data.current_pkt}", self, flush=True)
raise e
#It's called the first time if the headers are complete, and second time with body complete
def _after_fetch_callable_checks(self, internal_data: DataStreamCtx):
if self._parser.headers_complete and not self._headers_were_set:
self._headers_were_set = True
return True
return self._parser.message_complete or self.should_upgrade
def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
@staticmethod
def _before_fetch_callable_checks(internal_data: DataStreamCtx):
return True
def _trigger_remove_data(self, internal_data: DataStreamCtx):
return self.message_complete and not self.should_upgrade
@classmethod
def _fetch_packet(cls, internal_data: DataStreamCtx):
if internal_data.current_pkt is None or internal_data.current_pkt.is_tcp is False:
raise NotReadyToRun()
datahandler:InternalBasicHttpMetaClass = internal_data.data_handler_context.get(cls, None)
if datahandler is None or datahandler.raised_error:
datahandler = cls()
internal_data.data_handler_context[cls] = datahandler
ParserType = cls._associated_parser_class()
if not datahandler._before_fetch_callable_checks(internal_data):
parser = internal_data.data_handler_context.get(cls, None)
if parser is None or parser.raised_error:
parser: InternalHttpRequest|InternalHttpResponse = ParserType()
internal_data.data_handler_context[cls] = parser
if not cls._before_fetch_callable_checks(internal_data):
raise NotReadyToRun()
# Memory size management
if datahandler.total_size+len(internal_data.current_pkt.data) > internal_data.stream_max_size:
if parser.total_size+len(internal_data.current_pkt.data) > internal_data.stream_max_size:
match internal_data.full_stream_action:
case FullStreamAction.FLUSH:
datahandler = cls()
internal_data.data_handler_context[cls] = datahandler
# Deleting parser and re-creating it
parser.messages.clear()
parser.msg.total_size -= len(parser.msg.stream)
parser.msg.stream = b""
parser.msg.total_size -= len(parser.msg.body)
parser.msg.body = b""
print("[WARNING] Flushing stream", flush=True)
if parser.total_size+len(internal_data.current_pkt.data) > internal_data.stream_max_size:
parser.reset_data()
case FullStreamAction.REJECT:
raise StreamFullReject()
case FullStreamAction.DROP:
@@ -262,16 +323,41 @@ class InternalBasicHttpMetaClass:
case FullStreamAction.ACCEPT:
raise NotReadyToRun()
datahandler._fetch_current_packet(internal_data)
headers_were_set = parser.msg.headers_complete
try:
parser.parse_data(internal_data.current_pkt.data)
except Exception as e:
match internal_data.invalid_encoding_action:
case ExceptionAction.REJECT:
raise RejectConnection()
case ExceptionAction.DROP:
raise DropPacket()
case ExceptionAction.NOACTION:
raise e
case ExceptionAction.ACCEPT:
raise NotReadyToRun()
if not datahandler._after_fetch_callable_checks(internal_data):
messages_tosend:list[InternalHTTPMessage] = []
for i in range(len(parser.messages)):
messages_tosend.append(parser.pop_message())
if len(messages_tosend) > 0:
headers_were_set = False # New messages completed so the current message headers were not set in this case
if not headers_were_set and parser.msg.headers_complete:
messages_tosend.append(parser.msg) # Also the current message needs to be sent due to complete headers
if headers_were_set and parser.msg.message_complete and parser.msg.should_upgrade and parser.save_body:
messages_tosend.append(parser.msg) # Also the current message needs to be sent because a websocket stream is going on
messages_to_call = len(messages_tosend)
if messages_to_call == 0:
raise NotReadyToRun()
elif messages_to_call == 1:
return cls(parser, messages_tosend[0])
if datahandler._trigger_remove_data(internal_data):
if internal_data.data_handler_context.get(cls):
del internal_data.data_handler_context[cls]
return datahandler
return [cls(parser, ele) for ele in messages_tosend]
class HttpRequest(InternalBasicHttpMetaClass):
"""
@@ -279,22 +365,21 @@ class HttpRequest(InternalBasicHttpMetaClass):
This data handler will be called twice, first with the headers complete, and second with the body complete
"""
def __init__(self):
super().__init__()
# These will be used in the metaclass
self._parser: InternalHttpRequest = InternalHttpRequest()
self._headers_were_set = False
@staticmethod
def _associated_parser_class() -> Type[InternalHttpRequest]:
return InternalHttpRequest
@staticmethod
def _before_fetch_callable_checks(internal_data: DataStreamCtx):
return internal_data.current_pkt.is_input
@property
def method(self) -> bytes:
"""Method of the request"""
return self._parser.method_parsed
def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
return internal_data.current_pkt.is_input
return self._parser.msg.method
def __repr__(self):
return f"<HttpRequest method={self.method} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} has_begun={self.has_begun} content_length={self.content_length} stream={self.stream}>"
return f"<HttpRequest method={self.method} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} content_length={self.content_length} stream={self.stream}>"
class HttpResponse(InternalBasicHttpMetaClass):
"""
@@ -302,21 +387,21 @@ class HttpResponse(InternalBasicHttpMetaClass):
This data handler will be called twice, first with the headers complete, and second with the body complete
"""
def __init__(self):
super().__init__()
self._parser: InternalHttpResponse = InternalHttpResponse()
self._headers_were_set = False
@staticmethod
def _associated_parser_class() -> Type[InternalHttpResponse]:
return InternalHttpResponse
@staticmethod
def _before_fetch_callable_checks(internal_data: DataStreamCtx):
return not internal_data.current_pkt.is_input
@property
def status_code(self) -> int:
"""Status code of the response"""
return self._parser.status
def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
return not internal_data.current_pkt.is_input
return self._parser.msg.status
def __repr__(self):
return f"<HttpResponse status_code={self.status_code} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} has_begun={self.has_begun} content_length={self.content_length} stream={self.stream}>"
return f"<HttpResponse status_code={self.status_code} url={self.url} headers={self.headers} body={self.body} http_version={self.http_version} keep_alive={self.keep_alive} should_upgrade={self.should_upgrade} headers_complete={self.headers_complete} message_complete={self.message_complete} content_length={self.content_length} stream={self.stream}>"
class HttpRequestHeader(HttpRequest):
"""
@@ -324,18 +409,8 @@ class HttpRequestHeader(HttpRequest):
This data handler will be called only once, the headers are complete, the body will be empty and not buffered
"""
def __init__(self):
super().__init__()
self._parser._save_body = False
def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
return internal_data.current_pkt.is_input and not self._headers_were_set
def _after_fetch_callable_checks(self, internal_data: DataStreamCtx):
if self._parser.headers_complete and not self._headers_were_set:
self._headers_were_set = True
return True
return False
def _contructor_hook(self):
self._parser.save_body = False
class HttpResponseHeader(HttpResponse):
"""
@@ -343,15 +418,5 @@ class HttpResponseHeader(HttpResponse):
This data handler will be called only once, the headers are complete, the body will be empty and not buffered
"""
def __init__(self):
super().__init__()
self._parser._save_body = False
def _before_fetch_callable_checks(self, internal_data: DataStreamCtx):
return not internal_data.current_pkt.is_input and not self._headers_were_set
def _after_fetch_callable_checks(self, internal_data: DataStreamCtx):
if self._parser.headers_complete and not self._headers_were_set:
self._headers_were_set = True
return True
return False
def _contructor_hook(self):
self._parser.save_body = False

View File

@@ -71,7 +71,7 @@ class TCPInputStream(InternalTCPStream):
TCPClientStream = TCPInputStream
class TCPOutputStream:
class TCPOutputStream(InternalTCPStream):
"""
This datamodel will assemble the TCP output stream from the server sent data.
The function that use this data model will be handled when:

View File

@@ -30,7 +30,27 @@ def filter_with_args(http_request: HttpRequest) -> int:
return REJECT
`
const IMPORT_FULL_ACTION_STREAM = `from firegex.nfproxy import FullStreamAction`
const IMPORT_FULL_ACTION_STREAM = `from firegex.nfproxy import FullStreamAction
# Here is the definition of the FullStreamAction enum
class FullStreamAction(Enum):
"""Action to be taken by the filter when the stream is full"""
FLUSH = 0
ACCEPT = 1
REJECT = 2
DROP = 3
`
const ENUM_IMPORT_AND_DEFINITION = `from firegex.nfproxy import ExceptionAction
# Here is the definition of the ExceptionAction enum
class ExceptionAction(Enum):
"""Action to be taken by the filter when an exception occurs (used in some cases)"""
ACCEPT = 0 # Accept the packet that caused the exception
DROP = 1 # Drop the connection that caused the exception
REJECT = 2 # Reject the connection that caused the exception
NOACTION = 3 # Do nothing, the exception will be signaled and the stream will be accepted without calling the pyfilters anymore (for the current stream)
`
export const HELP_NFPROXY_SIM = `➤ fgex nfproxy -h
@@ -241,9 +261,6 @@ export const NFProxyDocs = () => {
<List.Item>
<strong>method: </strong> The method of the request (read only)
</List.Item>
<List.Item>
<strong>has_begun: </strong> It's true if the request has begun, false if it's not. (read only)
</List.Item>
<List.Item>
<strong>headers_complete: </strong> It's true if the headers are complete, false if they are not. (read only)
</List.Item>
@@ -303,9 +320,6 @@ export const NFProxyDocs = () => {
<List.Item>
<strong>status_code: </strong> The status code of the response (read only) (int)
</List.Item>
<List.Item>
<strong>has_begun: </strong> It's true if the response has begun, false if it's not. (read only)
</List.Item>
<List.Item>
<strong>headers_complete: </strong> It's true if the headers are complete, false if they are not. (read only)
</List.Item>
@@ -357,6 +371,17 @@ export const NFProxyDocs = () => {
</List.Item>
</List>
</Text>
<Title order={2} mt="lg" mb="sm">⚠️ Other Options</Title>
<Text size="lg" my="xs">
Here are other enums that you might need to use:
<CodeHighlight code={ENUM_IMPORT_AND_DEFINITION} language="python" my="sm" />
Then you can set these options in the globals:
<List>
<List.Item>
<strong>FGEX_INVALID_ENCODING_ACTION: </strong> Sets the action performed when the stream has an invalid encoding (due to a parser crash). The default is ExceptionAction.REJECT.
</List.Item>
</List>
</Text>
<Title order={2} mt="lg" mb="sm">🚀 How It Works</Title>
<Text mb="sm" size="lg">
The proxy is built on a multi-threaded architecture and integrates Python for dynamic filtering:

View File

@@ -54,12 +54,12 @@ export const nfproxy = {
serviceinfo: async (service_id:string) => {
return await getapi(`nfproxy/services/${service_id}`) as Service;
},
pyfilterenable: async (filter_name:string) => {
const { status } = await postapi(`nfproxy/pyfilters/${filter_name}/enable`) as ServerResponse;
pyfilterenable: async (service_id:string, filter_name:string) => {
const { status } = await postapi(`nfproxy/services/${service_id}/pyfilters/${filter_name}/enable`) as ServerResponse;
return status === "ok"?undefined:status
},
pyfilterdisable: async (filter_name:string) => {
const { status } = await postapi(`nfproxy/pyfilters/${filter_name}/disable`) as ServerResponse;
pyfilterdisable: async (service_id:string, filter_name:string) => {
const { status } = await postapi(`nfproxy/services/${service_id}/pyfilters/${filter_name}/disable`) as ServerResponse;
return status === "ok"?undefined:status
},
servicestart: async (service_id:string) => {
@@ -89,10 +89,10 @@ export const nfproxy = {
return status === "ok"?undefined:status
},
getpyfilterscode: async (service_id:string) => {
return await getapi(`nfproxy/services/${service_id}/pyfilters/code`) as string;
return await getapi(`nfproxy/services/${service_id}/code`) as string;
},
setpyfilterscode: async (service_id:string, code:string) => {
const { status } = await putapi(`nfproxy/services/${service_id}/pyfilters/code`,{ code }) as ServerResponse;
const { status } = await putapi(`nfproxy/services/${service_id}/code`,{ code }) as ServerResponse;
return status === "ok"?undefined:status
}
}

View File

@@ -12,7 +12,7 @@ export default function PyFilterView({ filterInfo }:{ filterInfo:PyFilter }) {
const isMedium = isMediumScreen()
const changeRegexStatus = () => {
(filterInfo.active?nfproxy.pyfilterdisable:nfproxy.pyfilterenable)(filterInfo.name).then(res => {
(filterInfo.active?nfproxy.pyfilterdisable:nfproxy.pyfilterenable)(filterInfo.service_id, filterInfo.name).then(res => {
if(!res){
okNotify(`Filter ${filterInfo.name} ${filterInfo.active?"deactivated":"activated"} successfully!`,`Filter '${filterInfo.name}' has been ${filterInfo.active?"deactivated":"activated"}!`)
}else{

View File

@@ -52,6 +52,7 @@ export type RegexAddForm = {
export type PyFilter = {
name:string,
service_id:string,
blocked_packets:number,
edited_packets:number,
active:boolean

View File

@@ -15,8 +15,8 @@ You can also run tests manually:
$ ./api_test.py -h
usage: api_test.py [-h] [--address ADDRESS] --password PASSWORD
$ ./nf_test.py -h
usage: nf_test.py [-h] [--address ADDRESS] --password PASSWORD [--service_name SERVICE_NAME] [--port PORT]
$ ./nfregex_test.py -h
usage: nfregex_test.py [-h] [--address ADDRESS] --password PASSWORD [--service_name SERVICE_NAME] [--port PORT]
[--ipv6] [--proto {tcp,udp}]
optional arguments:

View File

@@ -36,7 +36,7 @@ else:
def exit_test(code):
if service_id:
server.kill()
if(firegex.nf_delete_service(service_id)):
if(firegex.nfregex_delete_service(service_id)):
puts("Sucessfully deleted service ✔", color=colors.green)
else:
puts("Test Failed: Coulnd't delete serivce ✗", color=colors.red)
@@ -45,12 +45,12 @@ def exit_test(code):
#Create new Service
srvs = firegex.nf_get_services()
srvs = firegex.nfregex_get_services()
for ele in srvs:
if ele['name'] == args.service_name:
firegex.nf_delete_service(ele['service_id'])
firegex.nfregex_delete_service(ele['service_id'])
service_id = firegex.nf_add_service(args.service_name, args.port, "tcp", "127.0.0.1/24")
service_id = firegex.nfregex_add_service(args.service_name, args.port, "tcp", "127.0.0.1/24")
if service_id:
puts(f"Sucessfully created service {service_id}", color=colors.green)
else:
@@ -105,7 +105,7 @@ print(f"{getReading(args.port)} MB/s")
#Start firewall
if firegex.nf_start_service(service_id):
if firegex.nfregex_start_service(service_id):
puts(f"Sucessfully started service with id {service_id}", color=colors.green)
else:
puts("Benchmark Failed: Coulnd't start the service ✗", color=colors.red)
@@ -120,7 +120,7 @@ print(f"{results[0]} MB/s")
#Add all the regexs
for i in range(1,args.num_of_regexes+1):
regex = gen_regex()
if not firegex.nf_add_regex(service_id,regex,"B",active=True,is_case_sensitive=False):
if not firegex.nfregex_add_regex(service_id,regex,"B",active=True,is_case_sensitive=False):
puts("Benchmark Failed: Couldn't add the regex ✗", color=colors.red)
exit_test(1)
puts(f"Performance with {i} regex(s): ", color=colors.red, end='')
@@ -135,7 +135,7 @@ with open(args.output_file,'w') as f:
puts(f"Sucessfully written results to {args.output_file}", color=colors.magenta)
#Delete the Service
if firegex.nf_delete_service(service_id):
if firegex.nfregex_delete_service(service_id):
puts(f"Sucessfully delete service with id {service_id}", color=colors.green)
else:
puts("Test Failed: Couldn't delete service ✗", color=colors.red)

tests/nfproxy_test.py (new file, 534 lines)
View File

@@ -0,0 +1,534 @@
#!/usr/bin/env python3
from utils.colors import colors, puts, sep
from utils.firegexapi import FiregexAPI
from utils.tcpserver import TcpServer
import argparse
import secrets
import time
parser = argparse.ArgumentParser()
parser.add_argument("--address", "-a", type=str , required=False, help='Address of firegex backend', default="http://127.0.0.1:4444/")
parser.add_argument("--password", "-p", type=str, required=True, help='Firegex password')
parser.add_argument("--service_name", "-n", type=str , required=False, help='Name of the test service', default="Test Service")
parser.add_argument("--port", "-P", type=int , required=False, help='Port of the test service', default=1337)
parser.add_argument("--ipv6", "-6" , action="store_true", help='Test Ipv6', default=False)
args = parser.parse_args()
sep()
puts("Testing will start on ", color=colors.cyan, end="")
puts(f"{args.address}", color=colors.yellow)
firegex = FiregexAPI(args.address)
#Login
if (firegex.login(args.password)):
puts("Sucessfully logged in ✔", color=colors.green)
else:
puts("Test Failed: Unknown response or wrong passowrd ✗", color=colors.red)
exit(1)
#Create server
server = TcpServer(args.port,ipv6=args.ipv6)
srvs = firegex.nfproxy_get_services()
for ele in srvs:
if ele['name'] == args.service_name:
firegex.nfproxy_delete_service(ele['service_id'])
service_id = firegex.nfproxy_add_service(args.service_name, args.port, "http" , "::1" if args.ipv6 else "127.0.0.1" )
if service_id:
puts(f"Sucessfully created service {service_id}", color=colors.green)
else:
puts("Test Failed: Failed to create service ✗", color=colors.red)
exit(1)
def exit_test(code):
if service_id:
server.stop()
#if firegex.nfproxy_delete_service(service_id):
# puts("Sucessfully deleted service ✔", color=colors.green)
#else:
# puts("Test Failed: Coulnd't delete serivce ✗", color=colors.red)
exit(code)
if(firegex.nfproxy_start_service(service_id)):
puts("Sucessfully started service ✔", color=colors.green)
else:
puts("Test Failed: Failed to start service ✗", color=colors.red)
exit_test(1)
server.start()
time.sleep(0.5)
try:
if server.sendCheckData(secrets.token_bytes(432)):
puts("Successfully tested first proxy with no filters ✔", color=colors.green)
else:
puts("Test Failed: Data was corrupted ", color=colors.red)
exit_test(1)
except Exception:
puts("Test Failed: Couldn't send data to the server ", color=colors.red)
exit_test(1)
BASE_FILTER_VERDICT_TEST = """
from firegex.nfproxy.models import RawPacket
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def verdict_test(packet:RawPacket):
if b"%%TEST%%" in packet.data:
packet.l4_data = packet.l4_data.replace(b"%%TEST%%", b"%%MANGLE%%")
return %%ACTION%%
"""
BASE_FILTER_VERDICT_NAME = "verdict_test"
def get_vedict_test(to_match:str, action:str, mangle_to:str="REDACTED"):
return BASE_FILTER_VERDICT_TEST.replace("%%TEST%%", to_match).replace("%%ACTION%%", action).replace("%%MANGLE%%", mangle_to)
#Check if filter is present in the service
n_blocked = 0
n_mangled = 0
def checkFilter(match_bytes, filter_name, should_work=True, mangle_with=None):
if mangle_with:
if should_work:
global n_mangled
for r in firegex.nfproxy_get_service_pyfilters(service_id):
if r["name"] == filter_name:
#Test the filter
pre_packet = secrets.token_bytes(40)
post_packet = secrets.token_bytes(40)
server.connect_client()
server.send_packet(pre_packet + match_bytes + post_packet)
real_response = server.recv_packet()
expected_response = pre_packet + mangle_with + post_packet
if real_response == expected_response:
puts("The malicious request was successfully mangled ✔", color=colors.green)
n_mangled += 1
time.sleep(1)
if firegex.nfproxy_get_pyfilter(service_id, filter_name)["edited_packets"] == n_mangled:
puts("The packet was reported as mangled in the API ✔", color=colors.green)
else:
puts("Test Failed: The packet wasn't reported as mangled in the API ✗", color=colors.red)
exit_test(1)
server.send_packet(pre_packet)
if server.recv_packet() == pre_packet:
puts("Is able to communicate after mangle ✔", color=colors.green)
else:
puts("Test Failed: Couldn't communicate after mangle ✗", color=colors.red)
exit_test(1)
else:
puts("Test Failed: The request wasn't mangled ✗", color=colors.red)
exit_test(1)
server.close_client()
return
puts("Test Failed: The filter wasn't found ✗", color=colors.red)
else:
if server.sendCheckData(secrets.token_bytes(40) + match_bytes + secrets.token_bytes(40)):
puts("The request wasn't mangled ✔", color=colors.green)
else:
puts("Test Failed: The request was mangled when it shouldn't have", color=colors.red)
exit_test(1)
else:
if should_work:
global n_blocked
for r in firegex.nfproxy_get_service_pyfilters(service_id):
if r["name"] == filter_name:
#Test the filter
if not server.sendCheckData(secrets.token_bytes(40) + match_bytes + secrets.token_bytes(40)):
puts("The malicious request was successfully blocked ✔", color=colors.green)
n_blocked += 1
time.sleep(1)
if firegex.nfproxy_get_pyfilter(service_id, filter_name)["blocked_packets"] == n_blocked:
puts("The packet was reported as blocked in the API ✔", color=colors.green)
else:
puts("Test Failed: The packet wasn't reported as blocked in the API ✗", color=colors.red)
exit_test(1)
else:
puts("Test Failed: The request wasn't blocked ✗", color=colors.red)
exit_test(1)
return
puts("Test Failed: The filter wasn't found ✗", color=colors.red)
exit_test(1)
else:
if server.sendCheckData(secrets.token_bytes(40) + match_bytes + secrets.token_bytes(40)):
puts("The request wasn't blocked ✔", color=colors.green)
else:
puts("Test Failed: The request was blocked when it shouldn't have", color=colors.red)
exit_test(1)
#Add new filter
secret = bytes(secrets.token_hex(16).encode())
if firegex.nfproxy_set_code(service_id,get_vedict_test(secret.decode(), "REJECT")):
puts(f"Sucessfully added filter for {str(secret)} in REJECT mode ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
checkFilter(secret, BASE_FILTER_VERDICT_NAME)
#Pause the proxy
if firegex.nfproxy_stop_service(service_id):
puts(f"Sucessfully paused service with id {service_id}", color=colors.green)
else:
puts("Test Failed: Coulnd't pause the service ✗", color=colors.red)
exit_test(1)
#Check if it's actually paused
checkFilter(secret, BASE_FILTER_VERDICT_NAME, should_work=False)
#Start firewall
if firegex.nfproxy_start_service(service_id):
puts(f"Sucessfully started service with id {service_id}", color=colors.green)
else:
puts("Test Failed: Coulnd't start the service ✗", color=colors.red)
exit_test(1)
checkFilter(secret, BASE_FILTER_VERDICT_NAME)
#Disable filter
if firegex.nfproxy_disable_pyfilter(service_id, BASE_FILTER_VERDICT_NAME):
puts(f"Sucessfully disabled filter {BASE_FILTER_VERDICT_NAME}", color=colors.green)
else:
puts("Test Failed: Coulnd't disable the filter ✗", color=colors.red)
exit_test(1)
#Check if it's actually disabled
checkFilter(secret, BASE_FILTER_VERDICT_NAME, should_work=False)
#Enable filter
if firegex.nfproxy_enable_pyfilter(service_id, BASE_FILTER_VERDICT_NAME):
puts(f"Sucessfully enabled filter {BASE_FILTER_VERDICT_NAME}", color=colors.green)
else:
puts("Test Failed: Coulnd't enable the regex ✗", color=colors.red)
exit_test(1)
checkFilter(secret, BASE_FILTER_VERDICT_NAME)
def remove_filters():
global n_blocked, n_mangled
server.stop()
server.start()
if not firegex.nfproxy_set_code(service_id, ""):
puts("Test Failed: Couldn't remove the filter ✗", color=colors.red)
exit_test(1)
n_blocked = 0
n_mangled = 0
remove_filters()
#Check if it's actually deleted
checkFilter(secret, BASE_FILTER_VERDICT_NAME, should_work=False)
#Check if DROP works
if firegex.nfproxy_set_code(service_id,get_vedict_test(secret.decode(), "DROP")):
puts(f"Sucessfully added filter for {str(secret)} in DROP mode ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
checkFilter(secret, BASE_FILTER_VERDICT_NAME)
remove_filters()
#Check if UNSTABLE_MANGLE works
mangle_result = secrets.token_hex(4).encode() # Mangle to a smaller packet
if firegex.nfproxy_set_code(service_id, get_vedict_test(secret.decode(), "UNSTABLE_MANGLE", mangle_result.decode())):
puts(f"Sucessfully added filter for {str(secret)} in UNSTABLE_MANGLE mode to a smaller packet size ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
checkFilter(secret, BASE_FILTER_VERDICT_NAME, mangle_with=mangle_result)
remove_filters()
#Check if UNSTABLE_MANGLE works
mangle_result = secrets.token_hex(60).encode() # Mangle to a bigger packet
if firegex.nfproxy_set_code(service_id, get_vedict_test(secret.decode(), "UNSTABLE_MANGLE", mangle_result.decode())):
puts(f"Sucessfully added filter for {str(secret)} in UNSTABLE_MANGLE mode to a bigger packet size ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
checkFilter(secret, BASE_FILTER_VERDICT_NAME, mangle_with=mangle_result)
remove_filters()
secret = b"8331ee1bf75893dd7fa3d34f29bac7fc8935aa3ef6c565fe8b395ef7f485"
TCP_INPUT_STREAM_TEST = f"""
from firegex.nfproxy.models import TCPInputStream
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def data_type_test(packet:TCPInputStream):
if {repr(secret)} in packet.data:
return REJECT
"""
if firegex.nfproxy_set_code(service_id, TCP_INPUT_STREAM_TEST):
puts(f"Sucessfully added filter for {str(secret)} for TCPInputStream ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
data_split = len(secret)//2
server.connect_client()
server.send_packet(secret[:data_split])
if server.recv_packet() == secret[:data_split]:
puts("The half-packet was successfully sent and received ✔", color=colors.green)
else:
puts("Test Failed: The half-packet wasn't received ✗", color=colors.red)
exit_test(1)
server.send_packet(secret[data_split:])
if not server.recv_packet():
puts("The malicious request was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The request wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
remove_filters()
secret = b"8331ee1bf75893dd7fa3d34f29bac7fc8935aa3ef6c565fe8b395ef7f485"
TCP_OUTPUT_STREAM_TEST = f"""
from firegex.nfproxy.models import TCPOutputStream
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def data_type_test(packet:TCPOutputStream):
if {repr(secret)} in packet.data:
return REJECT
"""
if firegex.nfproxy_set_code(service_id, TCP_OUTPUT_STREAM_TEST):
puts(f"Sucessfully added filter for {str(secret)} for TCPOutputStream ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
data_split = len(secret)//2
server.connect_client()
server.send_packet(secret[:data_split])
if server.recv_packet() == secret[:data_split]:
puts("The half-packet was successfully sent and received ✔", color=colors.green)
else:
puts("Test Failed: The half-packet wasn't received ✗", color=colors.red)
exit_test(1)
server.send_packet(secret[data_split:])
if not server.recv_packet():
puts("The malicious request was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The request wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
remove_filters()
secret = b"8331ee1bf75893dd7fa3d34f29bac7fc8935aa3ef6c565fe8b395ef7f485"
REQUEST_HEADER_TEST = f"""POST / HTTP/1.1
Host: localhost
X-TeSt: {secret.decode()}
Content-Length: 15
A Friendly Body""".replace("\n", "\r\n")
REQUEST_BODY_TEST = f"""POST / HTTP/1.1
Host: localhost
X-TeSt: NotTheSecret
Content-Length: {len(secret.decode())}
{secret.decode()}""".replace("\n", "\r\n")
HTTP_REQUEST_STREAM_TEST = f"""
from firegex.nfproxy.models import HttpRequest
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def data_type_test(req:HttpRequest):
if {repr(secret.decode())} in req.get_header("x-test"):
return REJECT
if req.body:
if {repr(secret)} in req.body:
return REJECT
"""
if firegex.nfproxy_set_code(service_id, HTTP_REQUEST_STREAM_TEST):
puts(f"Sucessfully added filter for {str(secret)} for HttpRequest ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
server.connect_client()
server.send_packet(REQUEST_HEADER_TEST.encode())
if not server.recv_packet():
puts("The malicious HTTP request with the malicious header was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The HTTP request with the malicious header wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
server.connect_client()
server.send_packet(REQUEST_BODY_TEST.encode())
if not server.recv_packet():
puts("The malicious HTTP request with the malicious body was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The HTTP request with the malicious body wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
remove_filters()
HTTP_REQUEST_HEADER_STREAM_TEST = f"""
from firegex.nfproxy.models import HttpRequestHeader
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def data_type_test(req:HttpRequestHeader):
if {repr(secret.decode())} in req.get_header("x-test"):
return REJECT
"""
if firegex.nfproxy_set_code(service_id, HTTP_REQUEST_HEADER_STREAM_TEST):
puts(f"Sucessfully added filter for {str(secret)} for HttpRequestHeader ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
server.connect_client()
server.send_packet(REQUEST_HEADER_TEST.encode())
if not server.recv_packet():
puts("The malicious HTTP request with the malicious header was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The HTTP request with the malicious header wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
remove_filters()
secret = b"8331ee1bf75893dd7fa3d34f29bac7fc8935aa3ef6c565fe8b395ef7f485"
RESPONSE_HEADER_TEST = f"""HTTP/1.1 200 OK
Host: localhost
X-TeSt: {secret.decode()}
Content-Length: 15
A Friendly Body""".replace("\n", "\r\n")
RESPONSE_BODY_TEST = f"""HTTP/1.1 200 OK
Host: localhost
X-TeSt: NotTheSecret
Content-Length: {len(secret.decode())}
{secret.decode()}""".replace("\n", "\r\n")
HTTP_RESPONSE_STREAM_TEST = f"""
from firegex.nfproxy.models import HttpResponse
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def data_type_test(req:HttpResponse):
if {repr(secret.decode())} in req.get_header("x-test"):
return REJECT
if req.body:
if {repr(secret)} in req.body:
return REJECT
"""
if firegex.nfproxy_set_code(service_id, HTTP_RESPONSE_STREAM_TEST):
puts(f"Sucessfully added filter for {str(secret)} for HttpResponse ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
server.connect_client()
server.send_packet(RESPONSE_HEADER_TEST.encode())
if not server.recv_packet():
puts("The malicious HTTP request with the malicious header was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The HTTP request with the malicious header wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
server.connect_client()
server.send_packet(RESPONSE_BODY_TEST.encode())
if not server.recv_packet():
puts("The malicious HTTP request with the malicious body was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The HTTP request with the malicious body wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
remove_filters()
HTTP_RESPONSE_HEADER_STREAM_TEST = f"""
from firegex.nfproxy.models import HttpResponseHeader
from firegex.nfproxy import pyfilter, ACCEPT, UNSTABLE_MANGLE, DROP, REJECT
@pyfilter
def data_type_test(req:HttpResponseHeader):
if {repr(secret.decode())} in req.get_header("x-test"):
return REJECT
"""
if firegex.nfproxy_set_code(service_id, HTTP_RESPONSE_HEADER_STREAM_TEST):
puts(f"Sucessfully added filter for {str(secret)} for HttpResponseHeader ✔", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the filter {str(secret)}", color=colors.red)
exit_test(1)
server.connect_client()
server.send_packet(RESPONSE_HEADER_TEST.encode())
if not server.recv_packet():
puts("The malicious HTTP request with the malicious header was successfully blocked ✔", color=colors.green)
else:
puts("Test Failed: The HTTP request with the malicious header wasn't blocked ✗", color=colors.red)
exit_test(1)
server.close_client()
remove_filters()
#Rename service
if firegex.nfproxy_rename_service(service_id,f"{args.service_name}2"):
puts(f"Sucessfully renamed service to {args.service_name}2 ✔", color=colors.green)
else:
puts("Test Failed: Coulnd't rename service ✗", color=colors.red)
exit_test(1)
#Check if service was renamed correctly
service = firegex.nfproxy_get_service(service_id)
if service["name"] == f"{args.service_name}2":
puts("Checked that service was renamed correctly ✔", color=colors.green)
else:
puts("Test Failed: Service wasn't renamed correctly ✗", color=colors.red)
exit_test(1)
#Rename back service
if(firegex.nfproxy_rename_service(service_id,f"{args.service_name}")):
puts(f"Sucessfully renamed service to {args.service_name}", color=colors.green)
else:
puts("Test Failed: Coulnd't rename service ✗", color=colors.red)
exit_test(1)
#Change settings
if(firegex.nfproxy_settings_service(service_id, 1338, "::dead:beef" if args.ipv6 else "123.123.123.123", True)):
srv_updated = firegex.nfproxy_get_service(service_id)
if srv_updated["port"] == 1338 and ("::dead:beef" if args.ipv6 else "123.123.123.123") in srv_updated["ip_int"] and srv_updated["fail_open"]:
puts("Sucessfully changed service settings ✔", color=colors.green)
else:
puts("Test Failed: Service settings weren't updated correctly ✗", color=colors.red)
exit_test(1)
else:
puts("Test Failed: Coulnd't change service settings ✗", color=colors.red)
exit_test(1)
exit_test(0)
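The four data-type checks above each upload a module containing a single @pyfilter handler. The pyfilter listing endpoint used by these tests returns a list, so a single code upload can presumably register more than one handler at once; the sketch below is illustrative only, is not part of this diff, and assumes every decorated function in the uploaded module becomes its own filter entry (marker string and function names are invented):

# Hedged sketch, not part of this commit: two handlers in one uploaded module.
from firegex.nfproxy.models import HttpRequestHeader, HttpResponse
from firegex.nfproxy import pyfilter, REJECT

@pyfilter
def block_marked_request_header(req: HttpRequestHeader):
    # Reject requests whose x-test header carries the marker
    header = req.get_header("x-test")
    if header and "SECRET_MARKER" in header:
        return REJECT

@pyfilter
def block_marked_response_body(resp: HttpResponse):
    # Reject responses whose body carries the marker bytes
    if resp.body and b"SECRET_MARKER" in resp.body:
        return REJECT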

View File

@@ -36,26 +36,26 @@ server = (TcpServer if args.proto == "tcp" else UdpServer)(args.port,ipv6=args.i
def exit_test(code):
if service_id:
server.stop()
if(firegex.nf_delete_service(service_id)):
if(firegex.nfregex_delete_service(service_id)):
puts("Sucessfully deleted service ✔", color=colors.green)
else:
puts("Test Failed: Coulnd't delete serivce ✗", color=colors.red)
exit_test(1)
exit(code)
srvs = firegex.nf_get_services()
srvs = firegex.nfregex_get_services()
for ele in srvs:
if ele['name'] == args.service_name:
firegex.nf_delete_service(ele['service_id'])
firegex.nfregex_delete_service(ele['service_id'])
service_id = firegex.nf_add_service(args.service_name, args.port, args.proto , "::1" if args.ipv6 else "127.0.0.1" )
service_id = firegex.nfregex_add_service(args.service_name, args.port, args.proto , "::1" if args.ipv6 else "127.0.0.1" )
if service_id:
puts(f"Sucessfully created service {service_id}", color=colors.green)
else:
puts("Test Failed: Failed to create service ✗", color=colors.red)
exit(1)
if(firegex.nf_start_service(service_id)):
if(firegex.nfregex_start_service(service_id)):
puts("Sucessfully started service ✔", color=colors.green)
else:
puts("Test Failed: Failed to start service ✗", color=colors.red)
@@ -76,7 +76,7 @@ except Exception:
secret = bytes(secrets.token_hex(16).encode())
regex = base64.b64encode(secret).decode()
if firegex.nf_add_regex(service_id,regex,"B",active=True,is_case_sensitive=True):
if firegex.nfregex_add_regex(service_id,regex,"B",active=True,is_case_sensitive=True):
puts(f"Sucessfully added regex {str(secret)}", color=colors.green)
else:
puts(f"Test Failed: Couldn't add the regex {str(secret)}", color=colors.red)
@@ -87,14 +87,14 @@ else:
n_blocked = 0
def getMetric(metric_name, regex):
for metric in firegex.nf_get_metrics().split("\n"):
for metric in firegex.nfregex_get_metrics().split("\n"):
if metric.startswith(metric_name + "{") and f'regex="{regex}"' in metric:
return int(metric.split(" ")[-1])
def checkRegex(regex, should_work=True, upper=False, deleted=False):
if should_work:
global n_blocked
for r in firegex.nf_get_service_regexes(service_id):
for r in firegex.nfregex_get_service_regexes(service_id):
if r["regex"] == regex:
#Test the regex
s = base64.b64decode(regex).upper() if upper else base64.b64decode(regex)
@@ -102,7 +102,7 @@ def checkRegex(regex, should_work=True, upper=False, deleted=False):
puts("The malicious request was successfully blocked ✔", color=colors.green)
n_blocked += 1
time.sleep(1)
if firegex.nf_get_regex(r["id"])["n_packets"] == n_blocked:
if firegex.nfregex_get_regex(r["id"])["n_packets"] == n_blocked:
puts("The packet was reported as blocked in the API ✔", color=colors.green)
else:
puts("Test Failed: The packet wasn't reported as blocked in the API ✗", color=colors.red)
@@ -139,16 +139,16 @@ def checkRegex(regex, should_work=True, upper=False, deleted=False):
def clear_regexes():
global n_blocked
n_blocked = 0
for r in firegex.nf_get_service_regexes(service_id):
for r in firegex.nfregex_get_service_regexes(service_id):
if r["regex"] == regex:
if(firegex.nf_delete_regex(r["id"])):
if(firegex.nfregex_delete_regex(r["id"])):
puts(f"Sucessfully deleted regex with id {r['id']}", color=colors.green)
else:
puts("Test Failed: Coulnd't delete the regex ✗", color=colors.red)
exit_test(1)
break
if f'regex="{secret.decode()}"' not in firegex.nf_get_metrics():
puts(f"No regex metrics after deletion ✔", color=colors.green)
if f'regex="{secret.decode()}"' not in firegex.nfregex_get_metrics():
puts("No regex metrics after deletion ✔", color=colors.green)
else:
puts("Test Failed: Metrics found after deleting the regex ✗", color=colors.red)
exit_test(1)
@@ -156,7 +156,7 @@ def clear_regexes():
checkRegex(regex)
#Pause the proxy
if(firegex.nf_stop_service(service_id)):
if(firegex.nfregex_stop_service(service_id)):
puts(f"Sucessfully paused service with id {service_id}", color=colors.green)
else:
puts("Test Failed: Coulnd't pause the service ✗", color=colors.red)
@@ -166,7 +166,7 @@ else:
checkRegex(regex,should_work=False)
#Start firewall
if(firegex.nf_start_service(service_id)):
if(firegex.nfregex_start_service(service_id)):
puts(f"Sucessfully started service with id {service_id}", color=colors.green)
else:
puts("Test Failed: Coulnd't start the service ✗", color=colors.red)
@@ -175,9 +175,9 @@ else:
checkRegex(regex)
#Disable regex
for r in firegex.nf_get_service_regexes(service_id):
for r in firegex.nfregex_get_service_regexes(service_id):
if r["regex"] == regex:
if(firegex.nf_disable_regex(r["id"])):
if(firegex.nfregex_disable_regex(r["id"])):
puts(f"Sucessfully disabled regex with id {r['id']}", color=colors.green)
else:
puts("Test Failed: Coulnd't disable the regex ✗", color=colors.red)
@@ -188,9 +188,9 @@ for r in firegex.nf_get_service_regexes(service_id):
checkRegex(regex,should_work=False)
#Enable regex
for r in firegex.nf_get_service_regexes(service_id):
for r in firegex.nfregex_get_service_regexes(service_id):
if r["regex"] == regex:
if(firegex.nf_enable_regex(r["id"])):
if(firegex.nfregex_enable_regex(r["id"])):
puts(f"Sucessfully enabled regex with id {r['id']}", color=colors.green)
else:
puts("Test Failed: Coulnd't enable the regex ✗", color=colors.red)
@@ -206,7 +206,7 @@ clear_regexes()
checkRegex(regex,should_work=False,deleted=True)
#Add case insensitive regex
if(firegex.nf_add_regex(service_id,regex,"B",active=True, is_case_sensitive=False)):
if(firegex.nfregex_add_regex(service_id,regex,"B",active=True, is_case_sensitive=False)):
puts(f"Sucessfully added case insensitive regex {str(secret)}", color=colors.green)
else:
puts(f"Test Failed: Coulnd't add the case insensitive regex {str(secret)}", color=colors.red)
@@ -218,24 +218,31 @@ checkRegex(regex)
clear_regexes()
#Rename service
if(firegex.nf_rename_service(service_id,f"{args.service_name}2")):
if(firegex.nfregex_rename_service(service_id,f"{args.service_name}2")):
puts(f"Sucessfully renamed service to {args.service_name}2 ✔", color=colors.green)
else:
puts("Test Failed: Coulnd't rename service ✗", color=colors.red)
exit_test(1)
#Check if service was renamed correctly
service = firegex.nf_get_service(service_id)
service = firegex.nfregex_get_service(service_id)
if service["name"] == f"{args.service_name}2":
puts("Checked that service was renamed correctly ✔", color=colors.green)
else:
puts("Test Failed: Service wasn't renamed correctly ✗", color=colors.red)
exit_test(1)
#Rename back service
if(firegex.nfregex_rename_service(service_id,f"{args.service_name}")):
puts(f"Sucessfully renamed service to {args.service_name}", color=colors.green)
else:
puts("Test Failed: Coulnd't rename service ✗", color=colors.red)
exit_test(1)
#Change settings
opposite_proto = "udp" if args.proto == "tcp" else "tcp"
if(firegex.nf_settings_service(service_id, 1338, opposite_proto, "::dead:beef" if args.ipv6 else "123.123.123.123", True)):
srv_updated = firegex.nf_get_service(service_id)
if(firegex.nfregex_settings_service(service_id, 1338, opposite_proto, "::dead:beef" if args.ipv6 else "123.123.123.123", True)):
srv_updated = firegex.nfregex_get_service(service_id)
if srv_updated["port"] == 1338 and srv_updated["proto"] == opposite_proto and ("::dead:beef" if args.ipv6 else "123.123.123.123") in srv_updated["ip_int"] and srv_updated["fail_open"]:
puts("Sucessfully changed service settings ✔", color=colors.green)
else:

View File

@@ -16,13 +16,13 @@ done
echo "Running standard API test"
python3 api_test.py -p $PASSWORD || ERROR=1
echo "Running Netfilter Regex TCP ipv4"
python3 nf_test.py -p $PASSWORD -m tcp || ERROR=1
python3 nfregex_test.py -p $PASSWORD -m tcp || ERROR=1
echo "Running Netfilter Regex TCP ipv6"
python3 nf_test.py -p $PASSWORD -m tcp -6 || ERROR=1
python3 nfregex_test.py -p $PASSWORD -m tcp -6 || ERROR=1
echo "Running Netfilter Regex UDP ipv4"
python3 nf_test.py -p $PASSWORD -m udp || ERROR=1
python3 nfregex_test.py -p $PASSWORD -m udp || ERROR=1
echo "Running Netfilter Regex UDP ipv6"
python3 nf_test.py -p $PASSWORD -m udp -6 || ERROR=1
python3 nfregex_test.py -p $PASSWORD -m udp -6 || ERROR=1
echo "Running Port Hijack TCP ipv4"
python3 ph_test.py -p $PASSWORD -m tcp || ERROR=1
echo "Running Port Hijack TCP ipv6"
@@ -31,6 +31,10 @@ echo "Running Port Hijack UDP ipv4"
python3 ph_test.py -p $PASSWORD -m udp || ERROR=1
echo "Running Port Hijack UDP ipv6"
python3 ph_test.py -p $PASSWORD -m udp -6 || ERROR=1
echo "Running Netfilter Proxy ipv4"
python3 nfproxy_test.py -p $PASSWORD || ERROR=1
echo "Running Netfilter Proxy ipv6"
python3 nfproxy_test.py -p $PASSWORD -6 || ERROR=1
if [[ "$ERROR" == "0" ]] then
python3 benchmark.py -p $PASSWORD -r 5 -d 1 -s 10 || ERROR=1

View File

@@ -78,65 +78,65 @@ class FiregexAPI:
def reset(self, delete: bool):
self.s.post(f"{self.address}api/reset", json={"delete":delete})
def nf_get_services(self):
def nfregex_get_services(self):
req = self.s.get(f"{self.address}api/nfregex/services")
return req.json()
def nf_get_service(self,service_id: str):
def nfregex_get_service(self,service_id: str):
req = self.s.get(f"{self.address}api/nfregex/services/{service_id}")
return req.json()
def nf_stop_service(self,service_id: str):
def nfregex_stop_service(self,service_id: str):
req = self.s.post(f"{self.address}api/nfregex/services/{service_id}/stop")
return verify(req)
def nf_start_service(self,service_id: str):
def nfregex_start_service(self,service_id: str):
req = self.s.post(f"{self.address}api/nfregex/services/{service_id}/start")
return verify(req)
def nf_delete_service(self,service_id: str):
def nfregex_delete_service(self,service_id: str):
req = self.s.delete(f"{self.address}api/nfregex/services/{service_id}")
return verify(req)
def nf_rename_service(self,service_id: str, newname: str):
def nfregex_rename_service(self,service_id: str, newname: str):
req = self.s.put(f"{self.address}api/nfregex/services/{service_id}/rename" , json={"name":newname})
return verify(req)
def nf_settings_service(self,service_id: str, port: int, proto: str, ip_int: str, fail_open: bool):
def nfregex_settings_service(self,service_id: str, port: int, proto: str, ip_int: str, fail_open: bool):
req = self.s.put(f"{self.address}api/nfregex/services/{service_id}/settings" , json={"port":port, "proto":proto, "ip_int":ip_int, "fail_open":fail_open})
return verify(req)
def nf_get_service_regexes(self,service_id: str):
def nfregex_get_service_regexes(self,service_id: str):
req = self.s.get(f"{self.address}api/nfregex/services/{service_id}/regexes")
return req.json()
def nf_get_regex(self,regex_id: str):
def nfregex_get_regex(self,regex_id: str):
req = self.s.get(f"{self.address}api/nfregex/regexes/{regex_id}")
return req.json()
def nf_delete_regex(self,regex_id: str):
def nfregex_delete_regex(self,regex_id: str):
req = self.s.delete(f"{self.address}api/nfregex/regexes/{regex_id}")
return verify(req)
def nf_enable_regex(self,regex_id: str):
def nfregex_enable_regex(self,regex_id: str):
req = self.s.post(f"{self.address}api/nfregex/regexes/{regex_id}/enable")
return verify(req)
def nf_disable_regex(self,regex_id: str):
def nfregex_disable_regex(self,regex_id: str):
req = self.s.post(f"{self.address}api/nfregex/regexes/{regex_id}/disable")
return verify(req)
def nf_add_regex(self, service_id: str, regex: str, mode: str, active: bool, is_case_sensitive: bool):
def nfregex_add_regex(self, service_id: str, regex: str, mode: str, active: bool, is_case_sensitive: bool):
req = self.s.post(f"{self.address}api/nfregex/regexes",
json={"service_id": service_id, "regex": regex, "mode": mode, "active": active, "is_case_sensitive": is_case_sensitive})
return verify(req)
def nf_add_service(self, name: str, port: int, proto: str, ip_int: str, fail_open: bool = False):
def nfregex_add_service(self, name: str, port: int, proto: str, ip_int: str, fail_open: bool = False):
req = self.s.post(f"{self.address}api/nfregex/services" ,
json={"name":name,"port":port, "proto": proto, "ip_int": ip_int, "fail_open": fail_open})
return req.json()["service_id"] if verify(req) else False
def nf_get_metrics(self):
def nfregex_get_metrics(self):
req = self.s.get(f"{self.address}api/nfregex/metrics")
return req.text
@@ -173,3 +173,60 @@ class FiregexAPI:
req = self.s.post(f"{self.address}api/porthijack/services" ,
json={"name":name, "public_port": public_port, "proxy_port":proxy_port, "proto": proto, "ip_src": ip_src, "ip_dst": ip_dst})
return req.json()["service_id"] if verify(req) else False
def nfproxy_get_services(self):
req = self.s.get(f"{self.address}api/nfproxy/services")
return req.json()
def nfproxy_get_service(self,service_id: str):
req = self.s.get(f"{self.address}api/nfproxy/services/{service_id}")
return req.json()
def nfproxy_stop_service(self,service_id: str):
req = self.s.post(f"{self.address}api/nfproxy/services/{service_id}/stop")
return verify(req)
def nfproxy_start_service(self,service_id: str):
req = self.s.post(f"{self.address}api/nfproxy/services/{service_id}/start")
return verify(req)
def nfproxy_delete_service(self,service_id: str):
req = self.s.delete(f"{self.address}api/nfproxy/services/{service_id}")
return verify(req)
def nfproxy_rename_service(self,service_id: str, newname: str):
req = self.s.put(f"{self.address}api/nfproxy/services/{service_id}/rename" , json={"name":newname})
return verify(req)
def nfproxy_settings_service(self,service_id: str, port: int, ip_int: str, fail_open: bool):
req = self.s.put(f"{self.address}api/nfproxy/services/{service_id}/settings" , json={"port":port, "ip_int":ip_int, "fail_open":fail_open})
return verify(req)
def nfproxy_get_service_pyfilters(self,service_id: str):
req = self.s.get(f"{self.address}api/nfproxy/services/{service_id}/pyfilters")
return req.json()
def nfproxy_get_pyfilter(self, service_id:str, filter_name: str):
req = self.s.get(f"{self.address}api/nfproxy/services/{service_id}/pyfilters/{filter_name}")
return req.json()
def nfproxy_enable_pyfilter(self, service_id:str, filter_name: str):
req = self.s.post(f"{self.address}api/nfproxy/services/{service_id}/pyfilters/{filter_name}/enable")
return verify(req)
def nfproxy_disable_pyfilter(self, service_id:str, filter_name: str):
req = self.s.post(f"{self.address}api/nfproxy/services/{service_id}/pyfilters/{filter_name}/disable")
return verify(req)
def nfproxy_add_service(self, name: str, port: int, proto: str, ip_int: str, fail_open: bool = False):
req = self.s.post(f"{self.address}api/nfproxy/services" ,
json={"name":name,"port":port, "proto": proto, "ip_int": ip_int, "fail_open": fail_open})
return req.json()["service_id"] if verify(req) else False
def nfproxy_get_code(self, service_id: str):
req = self.s.get(f"{self.address}api/nfproxy/services/{service_id}/code")
return req.text
def nfproxy_set_code(self, service_id: str, code: str):
req = self.s.put(f"{self.address}api/nfproxy/services/{service_id}/code", json={"code":code})
return verify(req)
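
For orientation, a minimal usage sketch of the nfproxy wrappers added above (illustrative only and not part of the diff; the service name, port, header name and the set_code/start ordering are assumptions, and firegex stands for an already-authenticated FiregexAPI instance as in the tests):

# Hedged usage sketch for the wrappers above; all concrete values are made up.
DEMO_FILTER = """
from firegex.nfproxy.models import HttpRequestHeader
from firegex.nfproxy import pyfilter, REJECT

@pyfilter
def demo_filter(req: HttpRequestHeader):
    if req.get_header("x-demo") == "blockme":
        return REJECT
"""

sid = firegex.nfproxy_add_service("demo-service", 9000, "tcp", "127.0.0.1")
if sid:
    if firegex.nfproxy_set_code(sid, DEMO_FILTER) and firegex.nfproxy_start_service(sid):
        # Presumably one entry per @pyfilter function in the uploaded code
        print(firegex.nfproxy_get_service_pyfilters(sid))
        firegex.nfproxy_stop_service(sid)
    firegex.nfproxy_delete_service(sid)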

View File

@@ -1,34 +1,72 @@
from multiprocessing import Process
import socket
import traceback
class TcpServer:
def __init__(self,port,ipv6,proxy_port=None):
def _startServer(port):
sock = socket.socket(socket.AF_INET6 if ipv6 else socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('::1' if ipv6 else '127.0.0.1', port))
sock.listen(8)
while True:
connection,address = sock.accept()
buf = connection.recv(4096)
connection.send(buf)
connection.close()
def __init__(self,port,ipv6,proxy_port=None, verbose=False):
self.proxy_port = proxy_port
self.ipv6 = ipv6
self.port = port
self.server = Process(target=_startServer,args=[port])
self.verbose = verbose
self._regen_process()
def _regen_process(self):
def _startServer(port):
sock = socket.socket(socket.AF_INET6 if self.ipv6 else socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('::1' if self.ipv6 else '127.0.0.1', port))
sock.listen(8)
while True:
connection,address = sock.accept()
while True:
try:
buf = connection.recv(4096)
if buf == b'':
break
if self.verbose:
print("SERVER: ", buf)
connection.sendall(buf)
except Exception:
if self.verbose:
traceback.print_exc()
connection.close()
self.server = Process(target=_startServer,args=[self.port])
def start(self):
self.server.start()
def stop(self):
self.server.terminate()
self.server.join()
self._regen_process()
def sendCheckData(self,data):
s = socket.socket(socket.AF_INET6 if self.ipv6 else socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(1)
s.connect(('::1' if self.ipv6 else '127.0.0.1', self.proxy_port if self.proxy_port else self.port), )
s.sendall(data)
received_data = s.recv(4096)
s.close()
def connect_client(self):
self.client_sock = socket.socket(socket.AF_INET6 if self.ipv6 else socket.AF_INET, socket.SOCK_STREAM)
self.client_sock.settimeout(1)
self.client_sock.connect(('::1' if self.ipv6 else '127.0.0.1', self.proxy_port if self.proxy_port else self.port))
def close_client(self):
if self.client_sock:
self.client_sock.close()
def send_packet(self, packet):
if self.verbose:
print("CLIENT: ", packet)
self.client_sock.sendall(packet)
def recv_packet(self):
try:
return self.client_sock.recv(4096)
except TimeoutError:
if self.verbose:
traceback.print_exc()
return False
def sendCheckData(self, data, get_data=False):
self.connect_client()
self.send_packet(data)
received_data = self.recv_packet()
self.close_client()
if get_data:
return received_data
return received_data == data
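
The reworked helpers split the old one-shot sendCheckData() into explicit connect/send/recv/close steps, which is what the nfproxy test above drives directly. A hedged usage sketch (port and payloads invented; not part of the diff):

# Hedged usage sketch for the per-packet TcpServer helpers above.
server = TcpServer(4444, ipv6=False, verbose=True)
server.start()                              # spawn the echo-server process
server.connect_client()                     # open a client socket to the echoed port
server.send_packet(b"hello")
assert server.recv_packet() == b"hello"     # echoed back, or False on the 1s timeout
server.close_client()

# sendCheckData() still wraps the same connect/send/recv/close cycle in one call
assert server.sendCheckData(b"ping")
server.stop()                               # terminate and regenerate the worker process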