C++ refactoring, init PyPI projects, GitHub Action added

Domingo Dirutigliano
2025-02-09 22:32:48 +01:00
parent 3895984750
commit 49fea55bc7
30 changed files with 1361 additions and 515 deletions

View File

@@ -0,0 +1,171 @@
from modules.nfregex.nftables import FiregexTables
from utils import run_func
from modules.nfregex.models import Service, Regex
import re
import os
import asyncio
import traceback
from utils import DEBUG
from fastapi import HTTPException

nft = FiregexTables()


class RegexFilter:
    def __init__(
        self, regex,
        is_case_sensitive=True,
        input_mode=False,
        output_mode=False,
        blocked_packets=0,
        id=None,
        update_func=None
    ):
        self.regex = regex
        self.is_case_sensitive = is_case_sensitive
        if input_mode == output_mode:
            input_mode = output_mode = True  # (False, False) is treated as (True, True): filter both directions
        self.input_mode = input_mode
        self.output_mode = output_mode
        self.blocked = blocked_packets
        self.id = id
        self.update_func = update_func
        self.compiled_regex = self.compile()  # note: compile() is a generator, codes are produced lazily

    @classmethod
    def from_regex(cls, regex: Regex, update_func=None):
        return cls(
            id=regex.id, regex=regex.regex, is_case_sensitive=regex.is_case_sensitive,
            blocked_packets=regex.blocked_packets,
            # regex.mode: "C" = client->server, "S" = server->client, "B" = both
            input_mode=regex.mode in ["C", "B"], output_mode=regex.mode in ["S", "B"],
            update_func=update_func
        )

    def compile(self):
        if isinstance(self.regex, str):
            self.regex = self.regex.encode()
        if not isinstance(self.regex, bytes):
            raise Exception("Invalid regex parameter")
        re.compile(self.regex)  # raises re.error if the pattern is invalid
        case_sensitive = "1" if self.is_case_sensitive else "0"
        if self.input_mode:
            yield case_sensitive + "C" + self.regex.hex()
        if self.output_mode:
            yield case_sensitive + "S" + self.regex.hex()

    async def update(self):
        if self.update_func:
            await run_func(self.update_func, self)
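
# Example of the codes yielded by compile() above (illustrative values):
#   a case-insensitive, input-only filter on b"user-agent" yields the single
#   code "0C757365722d6167656e74", i.e. "<case flag><direction><regex hex>"
#   with "1"/"0" = case-sensitive/insensitive and "C"/"S" = the two traffic
#   directions selected by input_mode/output_mode.

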
class FiregexInterceptor:
    def __init__(self):
        self.srv: Service
        self.filter_map_lock: asyncio.Lock
        self.filter_map: dict[str, RegexFilter]
        self.regex_filters: set[RegexFilter]
        self.update_config_lock: asyncio.Lock
        self.process: asyncio.subprocess.Process
        self.update_task: asyncio.Task
        self.ack_arrived = False
        self.ack_status = None
        self.ack_fail_what = ""
        self.ack_lock = asyncio.Lock()

    @classmethod
    async def start(cls, srv: Service):
        self = cls()
        self.srv = srv
        self.filter_map_lock = asyncio.Lock()
        self.update_config_lock = asyncio.Lock()
        queue_range = await self._start_binary()
        self.update_task = asyncio.create_task(self.update_blocked())
        nft.add(self.srv, queue_range)
        # keep the ack lock held: _update_config() can only re-acquire it once
        # update_blocked() releases it on an ACK line from the binary
        if not self.ack_lock.locked():
            await self.ack_lock.acquire()
        return self

    async def _start_binary(self):
        proxy_binary_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../cppqueue")
        self.process = await asyncio.create_subprocess_exec(
            proxy_binary_path,
            stdout=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE,
            env={"MATCH_MODE": "stream" if self.srv.proto == "tcp" else "block", "NTHREADS": os.getenv("NTHREADS", "1")},
        )
        # the binary announces the NFQUEUE ids it bound with a "QUEUES <start> <end>" line
        line_fut = self.process.stdout.readuntil()
        try:
            line_fut = await asyncio.wait_for(line_fut, timeout=3)
        except asyncio.TimeoutError:
            self.process.kill()
            raise Exception("Invalid binary output")
        line = line_fut.decode()
        if line.startswith("QUEUES "):
            params = line.split()
            return (int(params[1]), int(params[2]))
        else:
            self.process.kill()
            raise Exception("Invalid binary output")

    async def update_blocked(self):
        # reader loop for the binary's stdout: "BLOCKED <filter>" lines bump the
        # counter of the matching entry in filter_map, "ACK ..." lines release
        # the waiter in _update_config()
        try:
            while True:
                line = (await self.process.stdout.readuntil()).decode()
                if DEBUG:
                    print(line)
                if line.startswith("BLOCKED "):
                    regex_id = line.split()[1]
                    async with self.filter_map_lock:
                        if regex_id in self.filter_map:
                            self.filter_map[regex_id].blocked += 1
                            await self.filter_map[regex_id].update()
                if line.startswith("ACK "):
                    self.ack_arrived = True
                    self.ack_status = line.split()[1].upper() == "OK"
                    if not self.ack_status:
                        self.ack_fail_what = " ".join(line.split()[2:])
                    self.ack_lock.release()
        except asyncio.CancelledError:
            pass
        except asyncio.IncompleteReadError:
            pass
        except Exception:
            traceback.print_exc()

    async def stop(self):
        self.update_task.cancel()
        if self.process and self.process.returncode is None:
            self.process.kill()

    async def _update_config(self, filters_codes):
        async with self.update_config_lock:
            # send the new filter codes as a single line and wait (at most 3s)
            # for update_blocked() to release the ack lock
            self.process.stdin.write((" ".join(filters_codes) + "\n").encode())
            await self.process.stdin.drain()
            try:
                async with asyncio.timeout(3):
                    await self.ack_lock.acquire()
            except TimeoutError:
                pass
            if not self.ack_arrived or not self.ack_status:
                raise HTTPException(status_code=500, detail=f"NFQ error: {self.ack_fail_what}")

    async def reload(self, filters: list[RegexFilter]):
        async with self.filter_map_lock:
            self.filter_map = self.compile_filters(filters)
            filters_codes = self.get_filter_codes()
            await self._update_config(filters_codes)

    def get_filter_codes(self):
        # codes ordered by blocked count, most active filters first
        filters_codes = list(self.filter_map.keys())
        filters_codes.sort(key=lambda a: self.filter_map[a].blocked, reverse=True)
        return filters_codes

    def compile_filters(self, filters: list[RegexFilter]):
        # map each generated code to its RegexFilter; filters whose regex fails
        # to compile are silently skipped
        res = {}
        for filter_obj in filters:
            try:
                raw_filters = filter_obj.compile()
                for filter in raw_filters:
                    res[filter] = filter_obj
            except Exception:
                pass
        return res
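
The whole exchange with the cppqueue binary happens over its stdin/stdout: one line of space-separated filter codes in, "QUEUES"/"ACK"/"BLOCKED" lines out. A minimal usage sketch, assuming this file is modules/nfregex/firegex.py (as the imports in the next file suggest), that nftables privileges and the compiled cppqueue binary are available, and with made-up service and regex values:

import asyncio
from modules.nfregex.models import Service
from modules.nfregex.firegex import FiregexInterceptor, RegexFilter

async def main():
    # hypothetical service definition (fields as in models.py below)
    srv = Service(service_id="web", status="active", port=80, name="web", proto="tcp", ip_int="10.0.0.1/32")
    interceptor = await FiregexInterceptor.start(srv)   # spawns ../cppqueue and installs the NFQUEUE rules
    flt = RegexFilter(regex=b"union select", is_case_sensitive=False, id=1)
    await interceptor.reload([flt])                     # sends the filter codes and waits for "ACK OK"
    await asyncio.sleep(60)                             # matches are reported as "BLOCKED ..." and counted
    await interceptor.stop()

asyncio.run(main())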

View File

@@ -0,0 +1,119 @@
import asyncio
from modules.nfregex.firegex import FiregexInterceptor, RegexFilter
from modules.nfregex.nftables import FiregexTables, FiregexFilter
from modules.nfregex.models import Regex, Service
from utils.sqlite import SQLite


class STATUS:
    STOP = "stop"
    ACTIVE = "active"


nft = FiregexTables()


class ServiceManager:
    def __init__(self, srv: Service, db):
        self.srv = srv
        self.db = db
        self.status = STATUS.STOP
        self.filters: dict[int, RegexFilter] = {}  # regex id -> RegexFilter
        self.lock = asyncio.Lock()
        self.interceptor = None

    async def _update_filters_from_db(self):
        regexes = [
            Regex.from_dict(ele) for ele in
            self.db.query("SELECT * FROM regexes WHERE service_id = ? AND active=1;", self.srv.id)
        ]
        # diff the active regexes against the currently installed filters
        old_filters = set(self.filters.keys())
        new_filters = set([f.id for f in regexes])
        # remove filters that are no longer active in the DB
        for f in old_filters:
            if f not in new_filters:
                del self.filters[f]
        # add newly activated filters
        for f in new_filters:
            if f not in old_filters:
                filter = [ele for ele in regexes if ele.id == f][0]
                self.filters[f] = RegexFilter.from_regex(filter, self._stats_updater)
        if self.interceptor:
            await self.interceptor.reload(self.filters.values())

    def __update_status_db(self, status):
        self.db.query("UPDATE services SET status = ? WHERE service_id = ?;", status, self.srv.id)

    async def next(self, to):
        async with self.lock:
            # ACTIVE -> STOP
            if (self.status, to) == (STATUS.ACTIVE, STATUS.STOP):
                await self.stop()
                self._set_status(to)
            # STOP -> ACTIVE
            elif (self.status, to) == (STATUS.STOP, STATUS.ACTIVE):
                await self.restart()

    def _stats_updater(self, filter: RegexFilter):
        self.db.query("UPDATE regexes SET blocked_packets = ? WHERE regex_id = ?;", filter.blocked, filter.id)

    def _set_status(self, status):
        self.status = status
        self.__update_status_db(status)

    async def start(self):
        if not self.interceptor:
            nft.delete(self.srv)
            self.interceptor = await FiregexInterceptor.start(self.srv)
            await self._update_filters_from_db()
            self._set_status(STATUS.ACTIVE)

    async def stop(self):
        nft.delete(self.srv)
        if self.interceptor:
            await self.interceptor.stop()
            self.interceptor = None

    async def restart(self):
        await self.stop()
        await self.start()

    async def update_filters(self):
        async with self.lock:
            await self._update_filters_from_db()


class FirewallManager:
    def __init__(self, db: SQLite):
        self.db = db
        self.service_table: dict[str, ServiceManager] = {}
        self.lock = asyncio.Lock()

    async def close(self):
        for key in list(self.service_table.keys()):
            await self.remove(key)

    async def remove(self, srv_id):
        async with self.lock:
            if srv_id in self.service_table:
                await self.service_table[srv_id].next(STATUS.STOP)
                del self.service_table[srv_id]

    async def init(self):
        nft.init()
        await self.reload()

    async def reload(self):
        async with self.lock:
            for srv in self.db.query('SELECT * FROM services;'):
                srv = Service.from_dict(srv)
                if srv.id in self.service_table:
                    continue
                self.service_table[srv.id] = ServiceManager(srv, self.db)
                await self.service_table[srv.id].next(srv.status)

    def get(self, srv_id) -> ServiceManager:
        if srv_id in self.service_table:
            return self.service_table[srv_id]
        else:
            raise ServiceNotFoundException()


class ServiceNotFoundException(Exception):
    pass
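
FirewallManager ties the per-service state machines to the services table: init() builds the base nftables chains and replays the stored status of every service, while ServiceManager.next() moves a single service between STOP and ACTIVE. A sketch of how a caller might drive it, assuming this file is importable as modules.nfregex.firewall (the actual module name is not shown in this diff) and that utils.sqlite.SQLite takes a database path:

import asyncio
from utils.sqlite import SQLite
from modules.nfregex.firewall import FirewallManager, STATUS   # module name assumed

async def main():
    db = SQLite("firegex.db")                        # constructor signature assumed
    firewall = FirewallManager(db)
    await firewall.init()                            # nft.init() + one ServiceManager per row in `services`
    await firewall.get("web").next(STATUS.ACTIVE)    # "web" is a hypothetical service_id
    await firewall.get("web").update_filters()       # re-read the active regexes for that service
    await firewall.close()                           # stops every managed service

asyncio.run(main())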

View File

@@ -0,0 +1,30 @@
import base64


class Service:
    def __init__(self, service_id: str, status: str, port: int, name: str, proto: str, ip_int: str, **other):
        self.id = service_id
        self.status = status
        self.port = port
        self.name = name
        self.proto = proto
        self.ip_int = ip_int

    @classmethod
    def from_dict(cls, var: dict):
        return cls(**var)


class Regex:
    def __init__(self, regex_id: int, regex: bytes, mode: str, service_id: str, blocked_packets: int, is_case_sensitive: bool, active: bool, **other):
        self.regex = regex
        self.mode = mode
        self.service_id = service_id
        self.blocked_packets = blocked_packets
        self.id = regex_id
        self.is_case_sensitive = is_case_sensitive
        self.active = active

    @classmethod
    def from_dict(cls, var: dict):
        var['regex'] = base64.b64decode(var['regex'])
        return cls(**var)
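
The models are plain views over database rows; the only conversion is that Regex.from_dict() base64-decodes the stored pattern into bytes. For example, with made-up row values:

import base64
from modules.nfregex.models import Regex

row = {
    "regex_id": 1, "regex": base64.b64encode(b"union select").decode(),
    "mode": "B", "service_id": "web", "blocked_packets": 0,
    "is_case_sensitive": False, "active": True,
}
r = Regex.from_dict(row)
assert r.regex == b"union select"   # decoded back to raw bytes, ready for RegexFilter.from_regex()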

View File

@@ -0,0 +1,105 @@
from modules.nfregex.models import Service
from utils import ip_parse, ip_family, NFTableManager, nftables_int_to_json


class FiregexFilter:
    def __init__(self, proto: str, port: int, ip_int: str, target: str, id: int):
        self.id = id
        self.target = target
        self.proto = proto
        self.port = int(port)
        self.ip_int = str(ip_int)

    def __eq__(self, o: object) -> bool:
        if isinstance(o, FiregexFilter) or isinstance(o, Service):
            return self.port == o.port and self.proto == o.proto and ip_parse(self.ip_int) == ip_parse(o.ip_int)
        return False


class FiregexTables(NFTableManager):
    input_chain = "nfproxy_input"
    output_chain = "nfproxy_output"

    def __init__(self):
        super().__init__([
            {"add": {"chain": {
                "family": "inet",
                "table": self.table_name,
                "name": self.input_chain,
                "type": "filter",
                "hook": "prerouting",
                "prio": -150,
                "policy": "accept"
            }}},
            {"add": {"chain": {
                "family": "inet",
                "table": self.table_name,
                "name": self.output_chain,
                "type": "filter",
                "hook": "postrouting",
                "prio": -150,
                "policy": "accept"
            }}}
        ], [
            {"flush": {"chain": {"table": self.table_name, "family": "inet", "name": self.input_chain}}},
            {"delete": {"chain": {"table": self.table_name, "family": "inet", "name": self.input_chain}}},
            {"flush": {"chain": {"table": self.table_name, "family": "inet", "name": self.output_chain}}},
            {"delete": {"chain": {"table": self.table_name, "family": "inet", "name": self.output_chain}}},
        ])

    def add(self, srv: Service, queue_range):
        for ele in self.get():
            if ele == srv:
                return  # rules for this service are already installed
        init, end = queue_range
        if init > end:
            init, end = end, init
        # "bypass" lets traffic through if no userspace program is attached to the queue
        self.cmd(
            {"insert": {"rule": {   # server -> client: match source address/port on postrouting
                "family": "inet",
                "table": self.table_name,
                "chain": self.output_chain,
                "expr": [
                    {'match': {'left': {'payload': {'protocol': ip_family(srv.ip_int), 'field': 'saddr'}}, 'op': '==', 'right': nftables_int_to_json(srv.ip_int)}},
                    {'match': {"left": {"payload": {"protocol": str(srv.proto), "field": "sport"}}, "op": "==", "right": int(srv.port)}},
                    {"queue": {"num": str(init) if init == end else {"range": [init, end]}, "flags": ["bypass"]}}
                ]
            }}},
            {"insert": {"rule": {   # client -> server: match destination address/port on prerouting
                "family": "inet",
                "table": self.table_name,
                "chain": self.input_chain,
                "expr": [
                    {'match': {'left': {'payload': {'protocol': ip_family(srv.ip_int), 'field': 'daddr'}}, 'op': '==', 'right': nftables_int_to_json(srv.ip_int)}},
                    {'match': {"left": {"payload": {"protocol": str(srv.proto), "field": "dport"}}, "op": "==", "right": int(srv.port)}},
                    {"queue": {"num": str(init) if init == end else {"range": [init, end]}, "flags": ["bypass"]}}
                ]
            }}}
        )

    def get(self) -> list[FiregexFilter]:
        # rebuild FiregexFilter objects from the live ruleset; the matched address
        # can be a plain string or a prefix object ({"prefix": {"addr": ..., "len": ...}})
        res = []
        for filter in self.list_rules(tables=[self.table_name], chains=[self.input_chain, self.output_chain]):
            ip_int = None
            if isinstance(filter["expr"][0]["match"]["right"], str):
                ip_int = str(ip_parse(filter["expr"][0]["match"]["right"]))
            else:
                ip_int = f'{filter["expr"][0]["match"]["right"]["prefix"]["addr"]}/{filter["expr"][0]["match"]["right"]["prefix"]["len"]}'
            res.append(FiregexFilter(
                target=filter["chain"],
                id=int(filter["handle"]),
                proto=filter["expr"][1]["match"]["left"]["payload"]["protocol"],
                port=filter["expr"][1]["match"]["right"],
                ip_int=ip_int
            ))
        return res

    def delete(self, srv: Service):
        # remove, by handle, every installed rule that matches this service
        for filter in self.get():
            if filter == srv:
                self.cmd({"delete": {"rule": {
                    "family": "inet",
                    "table": self.table_name,
                    "chain": filter.target,
                    "handle": filter.id
                }}})
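
FiregexTables only manages the queueing rules: add() inserts one prerouting rule (client-to-server, daddr/dport) and one postrouting rule (server-to-client, saddr/sport) that send matching packets to the NFQUEUE range handled by cppqueue, with the bypass flag so traffic is accepted if no program is attached to the queue. A sketch of direct usage, with a made-up service and queue range (requires root and the table managed by NFTableManager):

from modules.nfregex.models import Service
from modules.nfregex.nftables import FiregexTables

nft = FiregexTables()
nft.init()        # provided by NFTableManager (as called in the manager module above)
srv = Service(service_id="web", status="active", port=80, name="web", proto="tcp", ip_int="10.0.0.1/32")
nft.add(srv, (100, 101))   # queue both directions of 10.0.0.1:80/tcp to NFQUEUEs 100-101
print(nft.get())           # FiregexFilter objects parsed back from the live ruleset
nft.delete(srv)            # removes the rules for this service by handle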