From 49fea55bc7e043f702f40b35294c834794d291e2 Mon Sep 17 00:00:00 2001 From: Domingo Dirutigliano Date: Sun, 9 Feb 2025 22:32:48 +0100 Subject: [PATCH 01/11] c++ refactoring, init pypi projects, gh action added --- .github/workflows/docker-image.yml | 19 +- .github/workflows/pypi-publish.yml | 47 ++ .gitignore | 13 +- Dockerfile | 3 +- backend/binsrc/classes/netfilter.cpp | 444 ++++-------------- backend/binsrc/nfproxy-tun.cpp | 20 + backend/binsrc/nfqueue.cpp | 124 +---- backend/binsrc/proxytun/proxytun.cpp | 142 ++++++ .../binsrc/{classes => regex}/regex_rules.cpp | 24 +- backend/binsrc/regex/regexfilter.cpp | 321 +++++++++++++ backend/binsrc/regex/stream_ctx.cpp | 143 ++++++ backend/docker-entrypoint.sh | 2 - backend/modules/nfproxy/__init__.py | 0 backend/modules/nfproxy/firegex.py | 171 +++++++ backend/modules/nfproxy/firewall.py | 119 +++++ backend/modules/nfproxy/models.py | 30 ++ backend/modules/nfproxy/nftables.py | 105 +++++ backend/utils/__init__.py | 2 +- proxy-client/MANIFEST.in | 1 + proxy-client/README.md | 3 + proxy-client/fgex | 7 + proxy-client/fgex-pip/README.md | 5 + proxy-client/fgex-pip/fgex/__init__.py | 1 + proxy-client/fgex-pip/fgex/__main__.py | 6 + proxy-client/fgex-pip/setup.py | 25 + proxy-client/firegex/__init__.py | 7 + proxy-client/firegex/__main__.py | 7 + proxy-client/requirements.txt | 14 + proxy-client/setup.py | 31 ++ start.py | 40 +- 30 files changed, 1361 insertions(+), 515 deletions(-) create mode 100644 .github/workflows/pypi-publish.yml create mode 100644 backend/binsrc/nfproxy-tun.cpp create mode 100644 backend/binsrc/proxytun/proxytun.cpp rename backend/binsrc/{classes => regex}/regex_rules.cpp (91%) create mode 100644 backend/binsrc/regex/regexfilter.cpp create mode 100644 backend/binsrc/regex/stream_ctx.cpp create mode 100644 backend/modules/nfproxy/__init__.py create mode 100644 backend/modules/nfproxy/firegex.py create mode 100644 backend/modules/nfproxy/firewall.py create mode 100644 backend/modules/nfproxy/models.py 
create mode 100644 backend/modules/nfproxy/nftables.py create mode 100644 proxy-client/MANIFEST.in create mode 100644 proxy-client/README.md create mode 100755 proxy-client/fgex create mode 100644 proxy-client/fgex-pip/README.md create mode 100644 proxy-client/fgex-pip/fgex/__init__.py create mode 100644 proxy-client/fgex-pip/fgex/__main__.py create mode 100644 proxy-client/fgex-pip/setup.py create mode 100644 proxy-client/firegex/__init__.py create mode 100644 proxy-client/firegex/__main__.py create mode 100644 proxy-client/requirements.txt create mode 100644 proxy-client/setup.py diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 0db7d38..96ff219 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -20,12 +20,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 - - name: Build and run firegex - run: python3 start.py start --psw-no-interactive testpassword - - - name: Run tests - run: cd tests && ./run_tests.sh - - name: Set up QEMU uses: docker/setup-qemu-action@master with: @@ -41,13 +35,20 @@ jobs: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - + - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@v5 with: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - + - name: Extract tag name + id: tag + run: echo TAG_NAME=$(echo $GITHUB_REF | cut -d / -f 3) >> $GITHUB_OUTPUT + - name: Update version in setup.py + run: >- + sed -i "s/{{VERSION_PLACEHOLDER}}/${{ steps.tag.outputs.TAG_NAME }}/g" backend/utils/__init__.py; + sed -i "s/{{VERSION_PLACEHOLDER}}/${{ steps.tag.outputs.TAG_NAME }}/g" proxy-client/setup.py; + sed -i "s/{{VERSION_PLACEHOLDER}}/${{ steps.tag.outputs.TAG_NAME }}/g" proxy-client/firegex/__init__.py; - name: Build and push Docker image uses: docker/build-push-action@v5 with: @@ -59,5 +60,3 @@ jobs: labels: ${{ steps.meta.outputs.labels }} cache-from: type=gha cache-to: 
type=gha,mode=max - - diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml new file mode 100644 index 0000000..dbfe476 --- /dev/null +++ b/.github/workflows/pypi-publish.yml @@ -0,0 +1,47 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: Upload Python Package + +on: + release: + types: + - published + +permissions: + contents: read + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Extract tag name + id: tag + run: echo TAG_NAME=$(echo $GITHUB_REF | cut -d / -f 3) >> $GITHUB_OUTPUT + - name: Update version in setup.py + run: >- + sed -i "s/{{VERSION_PLACEHOLDER}}/${{ steps.tag.outputs.TAG_NAME }}/g" proxy-client/setup.py; + sed -i "s/{{VERSION_PLACEHOLDER}}/${{ steps.tag.outputs.TAG_NAME }}/g" proxy-client/firegex/__init__.py; + - name: Build package + run: cd client && python -m build && mv ./dist ../ + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.gitignore b/.gitignore index 4221f2b..03e9b7f 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,10 @@ # testing /frontend/coverage - +/proxy-client/firegex.egg-info +/proxy-client/dist +/proxy-client/fgex-pip/fgex.egg-info +/proxy-client/fgex-pip/dist /backend/db/ /backend/db/** /frontend/build/ @@ -21,10 +24,10 @@ 
/backend/modules/cppqueue /backend/binsrc/cppqueue /backend/modules/proxy -docker-compose.yml -firegex-compose.yml -firegex-compose-tmp-file.yml -firegex.py +/docker-compose.yml +/firegex-compose.yml +/firegex-compose-tmp-file.yml +/firegex.py /tests/benchmark.csv # misc **/.DS_Store diff --git a/Dockerfile b/Dockerfile index 74a1d6d..58a7283 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,7 +27,8 @@ ADD ./backend/requirements.txt /execute/requirements.txt RUN pip3 install --no-cache-dir --break-system-packages -r /execute/requirements.txt --no-warn-script-location COPY ./backend/binsrc /execute/binsrc -RUN g++ binsrc/nfqueue.cpp -o modules/cppqueue -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libhs libmnl) +RUN g++ binsrc/nfqueue.cpp -o modules/cppqueue -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libhs libmnl) +RUN g++ binsrc/nfproxy-tun.cpp -o modules/cppnfproxy -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libmnl) COPY ./backend/ /execute/ COPY --from=frontend /app/dist/ ./frontend/ diff --git a/backend/binsrc/classes/netfilter.cpp b/backend/binsrc/classes/netfilter.cpp index 257e983..fed457c 100644 --- a/backend/binsrc/classes/netfilter.cpp +++ b/backend/binsrc/classes/netfilter.cpp @@ -17,181 +17,61 @@ using Tins::TCPIP::Stream; using Tins::TCPIP::StreamFollower; using namespace std; -#ifndef NETFILTER_CLASSES_HPP -#define NETFILTER_CLASSES_HPP -typedef Tins::TCPIP::StreamIdentifier stream_id; -typedef map matching_map; +#ifndef NETFILTER_CLASS_CPP +#define NETFILTER_CLASS_CPP -/* Considering to use unorder_map using this hash of stream_id +typedef int QueueCallbackFunction(const nlmsghdr *, const mnl_socket*, void *); -namespace std { - template<> - struct hash { - size_t operator()(const stream_id& sid) const - { - return std::hash()(sid.max_address[0] + sid.max_address[1] + sid.max_address[2] + sid.max_address[3] + 
sid.max_address_port + sid.min_address[0] + sid.min_address[1] + sid.min_address[2] + sid.min_address[3] + sid.min_address_port); - } - }; -} - -*/ - -#ifdef DEBUG -ostream& operator<<(ostream& os, const Tins::TCPIP::StreamIdentifier::address_type &sid){ - bool first_print = false; - for (auto ele: sid){ - if (first_print || ele){ - first_print = true; - os << (int)ele << "."; - } - } - return os; -} - -ostream& operator<<(ostream& os, const stream_id &sid){ - os << sid.max_address << ":" << sid.max_address_port << " -> " << sid.min_address << ":" << sid.min_address_port; - return os; -} -#endif - -struct packet_info; - -struct tcp_stream_tmp { - bool matching_has_been_called = false; - bool result; - packet_info *pkt_info; +struct nfqueue_execution_data_tmp{ + mnl_socket* nl = nullptr; + function queue_cb = nullptr; + void *data = nullptr; }; -struct stream_ctx { - matching_map in_hs_streams; - matching_map out_hs_streams; - hs_scratch_t* in_scratch = nullptr; - hs_scratch_t* out_scratch = nullptr; - u_int16_t latest_config_ver = 0; - StreamFollower follower; - mnl_socket* nl; - tcp_stream_tmp tcp_match_util; +class NfQueueExecutor { + private: + size_t BUF_SIZE = 0xffff + (MNL_SOCKET_BUFFER_SIZE/2); + char *queue_msg_buffer = nullptr; + QueueCallbackFunction * _queue_callback_hook = nullptr; + public: - void clean_scratches(){ - if (out_scratch != nullptr){ - hs_free_scratch(out_scratch); - out_scratch = nullptr; - } - if (in_scratch != nullptr){ - hs_free_scratch(in_scratch); - in_scratch = nullptr; - } - } + unsigned int portid; + u_int16_t queue_num; + mnl_socket* nl = nullptr; - void clean_stream_by_id(stream_id sid){ - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.clean_stream_by_id] Cleaning stream context of " << sid << endl; - #endif - auto stream_search = in_hs_streams.find(sid); - hs_stream_t* stream_match; - if (stream_search != in_hs_streams.end()){ - stream_match = stream_search->second; - if (hs_close_stream(stream_match, in_scratch, nullptr, 
nullptr) != HS_SUCCESS) { - cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; - throw invalid_argument("Cannot close stream match on hyperscan"); - } - in_hs_streams.erase(stream_search); - } - - stream_search = out_hs_streams.find(sid); - if (stream_search != out_hs_streams.end()){ - stream_match = stream_search->second; - if (hs_close_stream(stream_match, out_scratch, nullptr, nullptr) != HS_SUCCESS) { - cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; - throw invalid_argument("Cannot close stream match on hyperscan"); - } - out_hs_streams.erase(stream_search); - } - } - - void clean(){ - - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.clean] Cleaning stream context" << endl; - #endif - - if (in_scratch){ - for(auto ele: in_hs_streams){ - if (hs_close_stream(ele.second, in_scratch, nullptr, nullptr) != HS_SUCCESS) { - cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; - throw invalid_argument("Cannot close stream match on hyperscan"); - } - } - in_hs_streams.clear(); - } + NfQueueExecutor(u_int16_t queue_num, QueueCallbackFunction* queue_cb): queue_num(queue_num), _queue_callback_hook(queue_cb){ + nl = mnl_socket_open(NETLINK_NETFILTER); - if (out_scratch){ - for(auto ele: out_hs_streams){ - if (hs_close_stream(ele.second, out_scratch, nullptr, nullptr) != HS_SUCCESS) { - cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; - throw invalid_argument("Cannot close stream match on hyperscan"); - } - } - out_hs_streams.clear(); - } - clean_scratches(); - } -}; + if (nl == nullptr) { throw runtime_error( "mnl_socket_open" );} -struct packet_info { - string packet; - string payload; - stream_id sid; - bool is_input; - bool is_tcp; - stream_ctx* sctx; -}; - -typedef bool NetFilterQueueCallback(packet_info &); - -template -class NetfilterQueue { - public: - - size_t 
BUF_SIZE = 0xffff + (MNL_SOCKET_BUFFER_SIZE/2); - char *buf = nullptr; - unsigned int portid; - u_int16_t queue_num; - stream_ctx sctx; - - NetfilterQueue(u_int16_t queue_num): queue_num(queue_num) { - sctx.nl = mnl_socket_open(NETLINK_NETFILTER); - - if (sctx.nl == nullptr) { throw runtime_error( "mnl_socket_open" );} - - if (mnl_socket_bind(sctx.nl, 0, MNL_SOCKET_AUTOPID) < 0) { - mnl_socket_close(sctx.nl); + if (mnl_socket_bind(nl, 0, MNL_SOCKET_AUTOPID) < 0) { + mnl_socket_close(nl); throw runtime_error( "mnl_socket_bind" ); } - portid = mnl_socket_get_portid(sctx.nl); + portid = mnl_socket_get_portid(nl); - buf = (char*) malloc(BUF_SIZE); + queue_msg_buffer = (char*) malloc(BUF_SIZE); - if (!buf) { - mnl_socket_close(sctx.nl); + if (!queue_msg_buffer) { + mnl_socket_close(nl); throw runtime_error( "allocate receive buffer" ); } - if (send_config_cmd(NFQNL_CFG_CMD_BIND) < 0) { + if (_send_config_cmd(NFQNL_CFG_CMD_BIND) < 0) { _clear(); throw runtime_error( "mnl_socket_send" ); } //TEST if BIND was successful - if (send_config_cmd(NFQNL_CFG_CMD_NONE) < 0) { // SEND A NONE cmmand to generate an error meessage + if (_send_config_cmd(NFQNL_CFG_CMD_NONE) < 0) { // SEND A NONE cmmand to generate an error meessage _clear(); throw runtime_error( "mnl_socket_send" ); } - if (recv_packet() == -1) { //RECV the error message + if (_recv_packet() == -1) { //RECV the error message _clear(); throw runtime_error( "mnl_socket_recvfrom" ); } - struct nlmsghdr *nlh = (struct nlmsghdr *) buf; + struct nlmsghdr *nlh = (struct nlmsghdr *) queue_msg_buffer; if (nlh->nlmsg_type != NLMSG_ERROR) { _clear(); @@ -210,71 +90,32 @@ class NetfilterQueue { } //END TESTING BIND - nlh = nfq_nlmsg_put(buf, NFQNL_MSG_CONFIG, queue_num); + nlh = nfq_nlmsg_put(queue_msg_buffer, NFQNL_MSG_CONFIG, queue_num); nfq_nlmsg_cfg_put_params(nlh, NFQNL_COPY_PACKET, 0xffff); mnl_attr_put_u32(nlh, NFQA_CFG_FLAGS, htonl(NFQA_CFG_F_GSO)); mnl_attr_put_u32(nlh, NFQA_CFG_MASK, htonl(NFQA_CFG_F_GSO)); - if 
(mnl_socket_sendto(sctx.nl, nlh, nlh->nlmsg_len) < 0) { + if (mnl_socket_sendto(nl, nlh, nlh->nlmsg_len) < 0) { _clear(); throw runtime_error( "mnl_socket_send" ); } } - static void on_data_recv(Stream& stream, stream_ctx* sctx, string data) { - sctx->tcp_match_util.matching_has_been_called = true; - bool result = callback_func(*sctx->tcp_match_util.pkt_info); - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.on_data_recv] result: " << result << endl; - #endif - if (!result){ - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.on_data_recv] Stream matched, removing all data about it" << endl; - #endif - sctx->clean_stream_by_id(sctx->tcp_match_util.pkt_info->sid); - stream.ignore_client_data(); - stream.ignore_server_data(); - } - sctx->tcp_match_util.result = result; - } + NfQueueExecutor(u_int16_t queue_num): NfQueueExecutor(queue_num, nullptr) {} - //Input data filtering - static void on_client_data(Stream& stream, stream_ctx* sctx) { - on_data_recv(stream, sctx, string(stream.client_payload().begin(), stream.client_payload().end())); - } + // --- Functions to be implemented by the user - //Server data filtering - static void on_server_data(Stream& stream, stream_ctx* sctx) { - on_data_recv(stream, sctx, string(stream.server_payload().begin(), stream.server_payload().end())); - } + virtual void before_loop() { + // Do nothing by default + } - static void on_new_stream(Stream& stream, stream_ctx* sctx) { - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.on_new_stream] New stream detected" << endl; - #endif - if (stream.is_partial_stream()) { - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.on_new_stream] Partial stream detected, skipping" << endl; - #endif - return; - } - stream.auto_cleanup_payloads(true); - stream.client_data_callback(bind(on_client_data, placeholders::_1, sctx)); - stream.server_data_callback(bind(on_server_data, placeholders::_1, sctx)); - stream.stream_closed_callback(bind(on_stream_close, placeholders::_1, sctx)); - } - - // A stream was 
terminated. The second argument is the reason why it was terminated - static void on_stream_close(Stream& stream, stream_ctx* sctx) { - stream_id stream_id = stream_id::make_identifier(stream); - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.on_stream_close] Stream terminated, deleting all data" << endl; - #endif - sctx->clean_stream_by_id(stream_id); - } + virtual void * callback_data_fetch(){ + return nullptr; + } + // --- End of functions to be implemented by the user void run(){ /* @@ -283,18 +124,21 @@ class NetfilterQueue { * in this information, so turn it off. */ int ret = 1; - mnl_socket_setsockopt(sctx.nl, NETLINK_NO_ENOBUFS, &ret, sizeof(int)); + mnl_socket_setsockopt(nl, NETLINK_NO_ENOBUFS, &ret, sizeof(int)); - sctx.follower.new_stream_callback(bind(on_new_stream, placeholders::_1, &sctx)); - sctx.follower.stream_termination_callback(bind(on_stream_close, placeholders::_1, &sctx)); + before_loop(); for (;;) { - ret = recv_packet(); + ret = _recv_packet(); if (ret == -1) { throw runtime_error( "mnl_socket_recvfrom" ); } - - ret = mnl_cb_run(buf, ret, 0, portid, queue_cb, &sctx); + nfqueue_execution_data_tmp data = { + nl: nl, + queue_cb: _queue_callback_hook, + data: callback_data_fetch() + }; + ret = mnl_cb_run(queue_msg_buffer, ret, 0, portid, _real_queue_cb, &data); if (ret < 0){ throw runtime_error( "mnl_cb_run" ); } @@ -302,170 +146,46 @@ class NetfilterQueue { } - ~NetfilterQueue() { - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.~NetfilterQueue] Destructor called" << endl; - #endif - send_config_cmd(NFQNL_CFG_CMD_UNBIND); + ~NfQueueExecutor() { + _send_config_cmd(NFQNL_CFG_CMD_UNBIND); _clear(); } - private: + + private: - ssize_t send_config_cmd(nfqnl_msg_config_cmds cmd){ - struct nlmsghdr *nlh = nfq_nlmsg_put(buf, NFQNL_MSG_CONFIG, queue_num); + static int _real_queue_cb(const nlmsghdr *nlh, void *data_ptr) { + nfqueue_execution_data_tmp* info = (nfqueue_execution_data_tmp*) data_ptr; + if (info->queue_cb == nullptr) return MNL_CB_OK; + 
return info->queue_cb(nlh, info->nl, info->data); + } + + inline void _clear(){ + if (queue_msg_buffer != nullptr) { + free(queue_msg_buffer); + queue_msg_buffer = nullptr; + } + mnl_socket_close(nl); + nl = nullptr; + } + + inline ssize_t _send_config_cmd(nfqnl_msg_config_cmds cmd){ + struct nlmsghdr *nlh = nfq_nlmsg_put(queue_msg_buffer, NFQNL_MSG_CONFIG, queue_num); nfq_nlmsg_cfg_put_cmd(nlh, AF_INET, cmd); - return mnl_socket_sendto(sctx.nl, nlh, nlh->nlmsg_len); + return mnl_socket_sendto(nl, nlh, nlh->nlmsg_len); } - ssize_t recv_packet(){ - return mnl_socket_recvfrom(sctx.nl, buf, BUF_SIZE); + inline ssize_t _recv_packet(){ + return mnl_socket_recvfrom(nl, queue_msg_buffer, BUF_SIZE); } - void _clear(){ - if (buf != nullptr) { - free(buf); - buf = nullptr; - } - mnl_socket_close(sctx.nl); - sctx.nl = nullptr; - sctx.clean(); - } - - template - static void build_verdict(T packet, uint8_t *payload, uint16_t plen, nlmsghdr *nlh_verdict, nfqnl_msg_packet_hdr *ph, stream_ctx* sctx, bool is_input){ - Tins::TCP* tcp = packet.template find_pdu(); - - if (tcp){ - Tins::PDU* application_layer = tcp->inner_pdu(); - u_int16_t payload_size = 0; - if (application_layer != nullptr){ - payload_size = application_layer->size(); - } - packet_info pktinfo{ - packet: string(payload, payload+plen), - payload: string(payload+plen - payload_size, payload+plen), - sid: stream_id::make_identifier(packet), - is_input: is_input, - is_tcp: true, - sctx: sctx, - }; - sctx->tcp_match_util.matching_has_been_called = false; - sctx->tcp_match_util.pkt_info = &pktinfo; - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.build_verdict] TCP Packet received " << packet.src_addr() << ":" << tcp->sport() << " -> " << packet.dst_addr() << ":" << tcp->dport() << " thr: " << this_thread::get_id() << ", sending to libtins StreamFollower" << endl; - #endif - sctx->follower.process_packet(packet); - #ifdef DEBUG - if (sctx->tcp_match_util.matching_has_been_called){ - cerr << "[DEBUG] 
[NetfilterQueue.build_verdict] StreamFollower has called matching functions" << endl; - }else{ - cerr << "[DEBUG] [NetfilterQueue.build_verdict] StreamFollower has NOT called matching functions" << endl; - } - #endif - if (sctx->tcp_match_util.matching_has_been_called && !sctx->tcp_match_util.result){ - Tins::PDU* data_layer = tcp->release_inner_pdu(); - if (data_layer != nullptr){ - delete data_layer; - } - tcp->set_flag(Tins::TCP::FIN,1); - tcp->set_flag(Tins::TCP::ACK,1); - tcp->set_flag(Tins::TCP::SYN,0); - nfq_nlmsg_verdict_put_pkt(nlh_verdict, packet.serialize().data(), packet.size()); - } - nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); - }else{ - Tins::UDP* udp = packet.template find_pdu(); - if (!udp){ - throw invalid_argument("Only TCP and UDP are supported"); - } - Tins::PDU* application_layer = udp->inner_pdu(); - u_int16_t payload_size = 0; - if (application_layer != nullptr){ - payload_size = application_layer->size(); - } - if((udp->inner_pdu() == nullptr)){ - nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); - } - packet_info pktinfo{ - packet: string(payload, payload+plen), - payload: string(payload+plen - payload_size, payload+plen), - sid: stream_id::make_identifier(packet), - is_input: is_input, - is_tcp: false, - sctx: sctx, - }; - if (callback_func(pktinfo)){ - nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); - }else{ - nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_DROP ); - } - } - } - - static int queue_cb(const nlmsghdr *nlh, void *data_ptr) - { - stream_ctx* sctx = (stream_ctx*)data_ptr; - - //Extract attributes from the nlmsghdr - nlattr *attr[NFQA_MAX+1] = {}; - - if (nfq_nlmsg_parse(nlh, attr) < 0) { - perror("problems parsing"); - return MNL_CB_ERROR; - } - if (attr[NFQA_PACKET_HDR] == nullptr) { - fputs("metaheader not set\n", stderr); - return MNL_CB_ERROR; - } - if (attr[NFQA_MARK] == nullptr) { - fputs("mark not set\n", stderr); - return MNL_CB_ERROR; - } 
- //Get Payload - uint16_t plen = mnl_attr_get_payload_len(attr[NFQA_PAYLOAD]); - uint8_t *payload = (uint8_t *)mnl_attr_get_payload(attr[NFQA_PAYLOAD]); - - //Return result to the kernel - struct nfqnl_msg_packet_hdr *ph = (nfqnl_msg_packet_hdr*) mnl_attr_get_payload(attr[NFQA_PACKET_HDR]); - struct nfgenmsg *nfg = (nfgenmsg *)mnl_nlmsg_get_payload(nlh); - char buf[MNL_SOCKET_BUFFER_SIZE]; - struct nlmsghdr *nlh_verdict; - struct nlattr *nest; - - nlh_verdict = nfq_nlmsg_put(buf, NFQNL_MSG_VERDICT, ntohs(nfg->res_id)); - - bool is_input = ntohl(mnl_attr_get_u32(attr[NFQA_MARK])) & 0x1; // == 0x1337 that is odd - #ifdef DEBUG - cerr << "[DEBUG] [NetfilterQueue.queue_cb] Packet received" << endl; - cerr << "[DEBUG] [NetfilterQueue.queue_cb] Packet ID: " << ntohl(ph->packet_id) << endl; - cerr << "[DEBUG] [NetfilterQueue.queue_cb] Payload size: " << plen << endl; - cerr << "[DEBUG] [NetfilterQueue.queue_cb] Is input: " << is_input << endl; - #endif - - // Check IP protocol version - if ( (payload[0] & 0xf0) == 0x40 ){ - build_verdict(Tins::IP(payload, plen), payload, plen, nlh_verdict, ph, sctx, is_input); - }else{ - build_verdict(Tins::IPv6(payload, plen), payload, plen, nlh_verdict, ph, sctx, is_input); - } - - nest = mnl_attr_nest_start(nlh_verdict, NFQA_CT); - mnl_attr_put_u32(nlh_verdict, CTA_MARK, htonl(42)); - mnl_attr_nest_end(nlh_verdict, nest); - - if (mnl_socket_sendto(sctx->nl, nlh_verdict, nlh_verdict->nlmsg_len) < 0) { - throw runtime_error( "mnl_socket_send" ); - } - - return MNL_CB_OK; - } - }; -template + +template >> class NFQueueSequence{ + private: - vector *> nfq; + vector nfq; uint16_t _init; uint16_t _end; vector threads; @@ -474,7 +194,7 @@ class NFQueueSequence{ NFQueueSequence(uint16_t seq_len){ if (seq_len <= 0) throw invalid_argument("seq_len <= 0"); - nfq = vector*>(seq_len); + nfq = vector(seq_len); _init = QUEUE_BASE_NUM; while(nfq[0] == nullptr){ if (_init+seq_len-1 >= 65536){ @@ -482,7 +202,7 @@ class NFQueueSequence{ } for (int 
i=0;i(_init+i); + nfq[i] = new Executor(_init+i); }catch(const invalid_argument e){ for(int j = 0; j < i; j++) { delete nfq[j]; @@ -499,7 +219,9 @@ class NFQueueSequence{ void start(){ if (threads.size() != 0) throw runtime_error("NFQueueSequence: already started!"); for (int i=0;i::run, nfq[i])); + threads.push_back(thread([executor = nfq[i]](){ + executor->run(); + })); } } @@ -524,4 +246,4 @@ class NFQueueSequence{ } }; -#endif // NETFILTER_CLASSES_HPP \ No newline at end of file +#endif // NETFILTER_CLASS_CPP \ No newline at end of file diff --git a/backend/binsrc/nfproxy-tun.cpp b/backend/binsrc/nfproxy-tun.cpp new file mode 100644 index 0000000..9443f04 --- /dev/null +++ b/backend/binsrc/nfproxy-tun.cpp @@ -0,0 +1,20 @@ +#include "proxytun/proxytun.cpp" +#include "utils.hpp" +#include +#include + +using namespace std; + +int main(int argc, char *argv[]){ + int n_of_threads = 1; + char * n_threads_str = getenv("NTHREADS"); + if (n_threads_str != nullptr) n_of_threads = ::atoi(n_threads_str); + if(n_of_threads <= 0) n_of_threads = 1; + + NFQueueSequence queues(n_of_threads); + queues.start(); + + osyncstream(cout) << "QUEUES " << queues.init() << " " << queues.end() << endl; + cerr << "[info] [main] Queues: " << queues.init() << ":" << queues.end() << " threads assigned: " << n_of_threads << endl; + +} diff --git a/backend/binsrc/nfqueue.cpp b/backend/binsrc/nfqueue.cpp index a97fd88..3d22fe5 100644 --- a/backend/binsrc/nfqueue.cpp +++ b/backend/binsrc/nfqueue.cpp @@ -1,12 +1,11 @@ -#include "classes/regex_rules.cpp" +#include "regex/regex_rules.cpp" +#include "regex/regexfilter.cpp" #include "classes/netfilter.cpp" -#include "utils.hpp" +#include #include using namespace std; -shared_ptr regex_config; - void config_updater (){ string line; while (true){ @@ -33,124 +32,15 @@ void config_updater (){ try{ regex_config.reset(new RegexRules(raw_rules, regex_config->stream_mode())); cerr << "[info] [updater] Config update done to ver "<< regex_config->ver() << endl; 
- cout << "ACK OK" << endl; + osyncstream(cout) << "ACK OK" << endl; }catch(const std::exception& e){ cerr << "[error] [updater] Failed to build new configuration!" << endl; - cout << "ACK FAIL " << e.what() << endl; + osyncstream(cout) << "ACK FAIL " << e.what() << endl; } } } -void inline scratch_setup(regex_ruleset &conf, hs_scratch_t* & scratch){ - if (scratch == nullptr && conf.hs_db != nullptr){ - if (hs_alloc_scratch(conf.hs_db, &scratch) != HS_SUCCESS) { - throw invalid_argument("Cannot alloc scratch"); - } - } -} - -struct matched_data{ - unsigned int matched = 0; - bool has_matched = false; -}; - - -bool filter_callback(packet_info& info){ - shared_ptr conf = regex_config; - auto current_version = conf->ver(); - if (current_version != info.sctx->latest_config_ver){ - #ifdef DEBUG - cerr << "[DEBUG] [filter_callback] Configuration has changed (" << current_version << "!=" << info.sctx->latest_config_ver << "), cleaning scratch spaces" << endl; - #endif - info.sctx->clean(); - info.sctx->latest_config_ver = current_version; - } - scratch_setup(conf->input_ruleset, info.sctx->in_scratch); - scratch_setup(conf->output_ruleset, info.sctx->out_scratch); - - hs_database_t* regex_matcher = info.is_input ? conf->input_ruleset.hs_db : conf->output_ruleset.hs_db; - if (regex_matcher == nullptr){ - return true; - } - - #ifdef DEBUG - cerr << "[DEBUG] [filter_callback] Matching packet with " << (info.is_input ? "input" : "output") << " ruleset" << endl; - #endif - - matched_data match_res; - hs_error_t err; - hs_scratch_t* scratch_space = info.is_input ? info.sctx->in_scratch: info.sctx->out_scratch; - auto match_func = [](unsigned int id, auto from, auto to, auto flags, auto ctx){ - auto res = (matched_data*)ctx; - res->has_matched = true; - res->matched = id; - return -1; // Stop matching - }; - hs_stream_t* stream_match; - if (conf->stream_mode()){ - matching_map* match_map = info.is_input ? 
&info.sctx->in_hs_streams : &info.sctx->out_hs_streams; - #ifdef DEBUG - cerr << "[DEBUG] [filter_callback] Dumping match_map " << match_map << endl; - for (auto ele: *match_map){ - cerr << "[DEBUG] [filter_callback] " << ele.first << " -> " << ele.second << endl; - } - cerr << "[DEBUG] [filter_callback] End of match_map" << endl; - #endif - auto stream_search = match_map->find(info.sid); - - if (stream_search == match_map->end()){ - - #ifdef DEBUG - cerr << "[DEBUG] [filter_callback] Creating new stream matcher for " << info.sid << endl; - #endif - if (hs_open_stream(regex_matcher, 0, &stream_match) != HS_SUCCESS) { - cerr << "[error] [filter_callback] Error opening the stream matcher (hs)" << endl; - throw invalid_argument("Cannot open stream match on hyperscan"); - } - if (info.is_tcp){ - match_map->insert_or_assign(info.sid, stream_match); - } - }else{ - stream_match = stream_search->second; - } - #ifdef DEBUG - cerr << "[DEBUG] [filter_callback] Matching as a stream" << endl; - #endif - err = hs_scan_stream( - stream_match,info.payload.c_str(), info.payload.length(), - 0, scratch_space, match_func, &match_res - ); - }else{ - #ifdef DEBUG - cerr << "[DEBUG] [filter_callback] Matching as a block" << endl; - #endif - err = hs_scan( - regex_matcher,info.payload.c_str(), info.payload.length(), - 0, scratch_space, match_func, &match_res - ); - } - if ( - !info.is_tcp && conf->stream_mode() && - hs_close_stream(stream_match, scratch_space, nullptr, nullptr) != HS_SUCCESS - ){ - cerr << "[error] [filter_callback] Error closing the stream matcher (hs)" << endl; - throw invalid_argument("Cannot close stream match on hyperscan"); - } - if (err != HS_SUCCESS && err != HS_SCAN_TERMINATED) { - cerr << "[error] [filter_callback] Error while matching the stream (hs)" << endl; - throw invalid_argument("Error while matching the stream with hyperscan"); - } - if (match_res.has_matched){ - auto rules_vector = info.is_input ? 
conf->input_ruleset.regexes : conf->output_ruleset.regexes; - stringstream msg; - msg << "BLOCKED " << rules_vector[match_res.matched] << "\n"; - cout << msg.str() << flush; - return false; - } - return true; -} - int main(int argc, char *argv[]){ int n_of_threads = 1; char * n_threads_str = getenv("NTHREADS"); @@ -165,10 +55,10 @@ int main(int argc, char *argv[]){ regex_config.reset(new RegexRules(stream_mode)); - NFQueueSequence queues(n_of_threads); + NFQueueSequence queues(n_of_threads); queues.start(); - cout << "QUEUES " << queues.init() << " " << queues.end() << endl; + osyncstream(cout) << "QUEUES " << queues.init() << " " << queues.end() << endl; cerr << "[info] [main] Queues: " << queues.init() << ":" << queues.end() << " threads assigned: " << n_of_threads << " stream mode: " << stream_mode << endl; config_updater(); diff --git a/backend/binsrc/proxytun/proxytun.cpp b/backend/binsrc/proxytun/proxytun.cpp new file mode 100644 index 0000000..22c88ac --- /dev/null +++ b/backend/binsrc/proxytun/proxytun.cpp @@ -0,0 +1,142 @@ +#ifndef PROXY_TUNNEL_CPP +#define PROXY_TUNNEL_CPP + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "../classes/netfilter.cpp" +#include + +using Tins::TCPIP::Stream; +using Tins::TCPIP::StreamFollower; +using namespace std; + +typedef Tins::TCPIP::StreamIdentifier stream_id; + +class SocketTunnelQueue: public NfQueueExecutor { + public: + + StreamFollower follower; + + void before_loop() override { + follower.new_stream_callback(bind(on_new_stream, placeholders::_1)); + follower.stream_termination_callback(bind(on_stream_close, placeholders::_1)); + } + + void * callback_data_fetch() override{ + return nullptr; + } + + static bool filter_action(){ + return true; + } + + static void on_data_recv(Stream& stream, string data, bool is_input) { + bool result = filter_action(); + if (!result){ + stream.ignore_client_data(); + stream.ignore_server_data(); + } + 
} + + //Input data filtering + static void on_client_data(Stream& stream) { + on_data_recv(stream, string(stream.client_payload().begin(), stream.client_payload().end()), true); + } + + //Server data filtering + static void on_server_data(Stream& stream) { + on_data_recv(stream, string(stream.server_payload().begin(), stream.server_payload().end()), false); + } + + + // A stream was terminated. The second argument is the reason why it was terminated + static void on_stream_close(Stream& stream) { + stream_id stream_id = stream_id::make_identifier(stream); + } + + static void on_new_stream(Stream& stream) { + stream.auto_cleanup_payloads(true); + if (stream.is_partial_stream()) { + return; + } + stream.client_data_callback(bind(on_client_data, placeholders::_1)); + stream.server_data_callback(bind(on_server_data, placeholders::_1)); + stream.stream_closed_callback(bind(on_stream_close, placeholders::_1)); + } + + + template + static void build_verdict(T packet, uint8_t *payload, uint16_t plen, nlmsghdr *nlh_verdict, nfqnl_msg_packet_hdr *ph){ + sctx->tcp_match_util.matching_has_been_called = false; + sctx->follower.process_packet(packet); + if (sctx->tcp_match_util.matching_has_been_called && !sctx->tcp_match_util.result){ + Tins::PDU* data_layer = tcp->release_inner_pdu(); + if (data_layer != nullptr){ + delete data_layer; + } + tcp->set_flag(Tins::TCP::FIN,1); + tcp->set_flag(Tins::TCP::ACK,1); + tcp->set_flag(Tins::TCP::SYN,0); + nfq_nlmsg_verdict_put_pkt(nlh_verdict, packet.serialize().data(), packet.size()); + } + nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); + } + + static int queue_cb(const nlmsghdr *nlh, const mnl_socket* nl, void *data_ptr) { + + //Extract attributes from the nlmsghdr + nlattr *attr[NFQA_MAX+1] = {}; + + if (nfq_nlmsg_parse(nlh, attr) < 0) { + perror("problems parsing"); + return MNL_CB_ERROR; + } + if (attr[NFQA_PACKET_HDR] == nullptr) { + fputs("metaheader not set\n", stderr); + return MNL_CB_ERROR; + } + //Get 
Payload + uint16_t plen = mnl_attr_get_payload_len(attr[NFQA_PAYLOAD]); + uint8_t *payload = (uint8_t *)mnl_attr_get_payload(attr[NFQA_PAYLOAD]); + + //Return result to the kernel + struct nfqnl_msg_packet_hdr *ph = (nfqnl_msg_packet_hdr*) mnl_attr_get_payload(attr[NFQA_PACKET_HDR]); + struct nfgenmsg *nfg = (nfgenmsg *)mnl_nlmsg_get_payload(nlh); + char buf[MNL_SOCKET_BUFFER_SIZE]; + struct nlmsghdr *nlh_verdict; + + nlh_verdict = nfq_nlmsg_put(buf, NFQNL_MSG_VERDICT, ntohs(nfg->res_id)); + + // Check IP protocol version + if ( (payload[0] & 0xf0) == 0x40 ){ + build_verdict(Tins::IP(payload, plen), payload, plen, nlh_verdict, ph); + }else{ + build_verdict(Tins::IPv6(payload, plen), payload, plen, nlh_verdict, ph); + } + + if (mnl_socket_sendto(nl, nlh_verdict, nlh_verdict->nlmsg_len) < 0) { + throw runtime_error( "mnl_socket_send" ); + } + + return MNL_CB_OK; + } + + SocketTunnelQueue(int queue) : NfQueueExecutor(queue, &queue_cb) {} + + ~SocketTunnelQueue() { + // TODO + } + +}; + +#endif // PROXY_TUNNEL_CPP \ No newline at end of file diff --git a/backend/binsrc/classes/regex_rules.cpp b/backend/binsrc/regex/regex_rules.cpp similarity index 91% rename from backend/binsrc/classes/regex_rules.cpp rename to backend/binsrc/regex/regex_rules.cpp index c01b2a2..8715932 100644 --- a/backend/binsrc/classes/regex_rules.cpp +++ b/backend/binsrc/regex/regex_rules.cpp @@ -1,15 +1,16 @@ +#ifndef REGEX_FILTER_CPP +#define REGEX_FILTER_CPP + #include #include #include #include "../utils.hpp" #include #include +#include using namespace std; -#ifndef REGEX_FILTER_HPP -#define REGEX_FILTER_HPP - enum FilterDirection{ CTOS, STOC }; struct decoded_regex { @@ -170,5 +171,20 @@ class RegexRules{ } }; -#endif // REGEX_FILTER_HPP +shared_ptr regex_config; + +void inline scratch_setup(regex_ruleset &conf, hs_scratch_t* & scratch){ + if (scratch == nullptr && conf.hs_db != nullptr){ + if (hs_alloc_scratch(conf.hs_db, &scratch) != HS_SUCCESS) { + throw invalid_argument("Cannot alloc 
scratch"); + } + } +} + +struct matched_data{ + unsigned int matched = 0; + bool has_matched = false; +}; + +#endif // REGEX_FILTER_CPP diff --git a/backend/binsrc/regex/regexfilter.cpp b/backend/binsrc/regex/regexfilter.cpp new file mode 100644 index 0000000..bd86817 --- /dev/null +++ b/backend/binsrc/regex/regexfilter.cpp @@ -0,0 +1,321 @@ +#ifndef REGEX_FILTER_CLASS_CPP +#define REGEX_FILTER_CLASS_CPP + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "../classes/netfilter.cpp" +#include "stream_ctx.cpp" +#include "regex_rules.cpp" + +using Tins::TCPIP::Stream; +using Tins::TCPIP::StreamFollower; +using namespace std; + +class RegexQueue: public NfQueueExecutor { + public: + stream_ctx sctx; + + void before_loop() override { + sctx.follower.new_stream_callback(bind(on_new_stream, placeholders::_1, &sctx)); + sctx.follower.stream_termination_callback(bind(on_stream_close, placeholders::_1, &sctx)); + } + + void * callback_data_fetch() override{ + return &sctx; + } + + static bool filter_action(packet_info& info){ + shared_ptr conf = regex_config; + auto current_version = conf->ver(); + if (current_version != info.sctx->latest_config_ver){ + #ifdef DEBUG + cerr << "[DEBUG] [filter_callback] Configuration has changed (" << current_version << "!=" << info.sctx->latest_config_ver << "), cleaning scratch spaces" << endl; + #endif + info.sctx->clean(); + info.sctx->latest_config_ver = current_version; + } + scratch_setup(conf->input_ruleset, info.sctx->in_scratch); + scratch_setup(conf->output_ruleset, info.sctx->out_scratch); + + hs_database_t* regex_matcher = info.is_input ? conf->input_ruleset.hs_db : conf->output_ruleset.hs_db; + if (regex_matcher == nullptr){ + return true; + } + + #ifdef DEBUG + cerr << "[DEBUG] [filter_callback] Matching packet with " << (info.is_input ? 
"input" : "output") << " ruleset" << endl; + #endif + + matched_data match_res; + hs_error_t err; + hs_scratch_t* scratch_space = info.is_input ? info.sctx->in_scratch: info.sctx->out_scratch; + auto match_func = [](unsigned int id, auto from, auto to, auto flags, auto ctx){ + auto res = (matched_data*)ctx; + res->has_matched = true; + res->matched = id; + return -1; // Stop matching + }; + hs_stream_t* stream_match; + if (conf->stream_mode()){ + matching_map* match_map = info.is_input ? &info.sctx->in_hs_streams : &info.sctx->out_hs_streams; + #ifdef DEBUG + cerr << "[DEBUG] [filter_callback] Dumping match_map " << match_map << endl; + for (auto ele: *match_map){ + cerr << "[DEBUG] [filter_callback] " << ele.first << " -> " << ele.second << endl; + } + cerr << "[DEBUG] [filter_callback] End of match_map" << endl; + #endif + auto stream_search = match_map->find(info.sid); + + if (stream_search == match_map->end()){ + + #ifdef DEBUG + cerr << "[DEBUG] [filter_callback] Creating new stream matcher for " << info.sid << endl; + #endif + if (hs_open_stream(regex_matcher, 0, &stream_match) != HS_SUCCESS) { + cerr << "[error] [filter_callback] Error opening the stream matcher (hs)" << endl; + throw invalid_argument("Cannot open stream match on hyperscan"); + } + if (info.is_tcp){ + match_map->insert_or_assign(info.sid, stream_match); + } + }else{ + stream_match = stream_search->second; + } + #ifdef DEBUG + cerr << "[DEBUG] [filter_callback] Matching as a stream" << endl; + #endif + err = hs_scan_stream( + stream_match,info.payload.c_str(), info.payload.length(), + 0, scratch_space, match_func, &match_res + ); + }else{ + #ifdef DEBUG + cerr << "[DEBUG] [filter_callback] Matching as a block" << endl; + #endif + err = hs_scan( + regex_matcher,info.payload.c_str(), info.payload.length(), + 0, scratch_space, match_func, &match_res + ); + } + if ( + !info.is_tcp && conf->stream_mode() && + hs_close_stream(stream_match, scratch_space, nullptr, nullptr) != HS_SUCCESS + ){ + cerr 
<< "[error] [filter_callback] Error closing the stream matcher (hs)" << endl; + throw invalid_argument("Cannot close stream match on hyperscan"); + } + if (err != HS_SUCCESS && err != HS_SCAN_TERMINATED) { + cerr << "[error] [filter_callback] Error while matching the stream (hs)" << endl; + throw invalid_argument("Error while matching the stream with hyperscan"); + } + if (match_res.has_matched){ + auto rules_vector = info.is_input ? conf->input_ruleset.regexes : conf->output_ruleset.regexes; + stringstream msg; + msg << "BLOCKED " << rules_vector[match_res.matched] << "\n"; + osyncstream(cout) << msg.str() << flush; + return false; + } + return true; + } + + static void on_data_recv(Stream& stream, stream_ctx* sctx, string data) { + sctx->tcp_match_util.matching_has_been_called = true; + bool result = filter_action(*sctx->tcp_match_util.pkt_info); + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.on_data_recv] result: " << result << endl; + #endif + if (!result){ + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.on_data_recv] Stream matched, removing all data about it" << endl; + #endif + sctx->clean_stream_by_id(sctx->tcp_match_util.pkt_info->sid); + stream.ignore_client_data(); + stream.ignore_server_data(); + } + sctx->tcp_match_util.result = result; + } + + //Input data filtering + static void on_client_data(Stream& stream, stream_ctx* sctx) { + on_data_recv(stream, sctx, string(stream.client_payload().begin(), stream.client_payload().end())); + } + + //Server data filtering + static void on_server_data(Stream& stream, stream_ctx* sctx) { + on_data_recv(stream, sctx, string(stream.server_payload().begin(), stream.server_payload().end())); + } + + + // A stream was terminated. 
The second argument is the reason why it was terminated + static void on_stream_close(Stream& stream, stream_ctx* sctx) { + stream_id stream_id = stream_id::make_identifier(stream); + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.on_stream_close] Stream terminated, deleting all data" << endl; + #endif + sctx->clean_stream_by_id(stream_id); + } + + static void on_new_stream(Stream& stream, stream_ctx* sctx) { + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.on_new_stream] New stream detected" << endl; + #endif + stream.auto_cleanup_payloads(true); + if (stream.is_partial_stream()) { + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.on_new_stream] Partial stream detected, skipping" << endl; + #endif + return; + } + stream.client_data_callback(bind(on_client_data, placeholders::_1, sctx)); + stream.server_data_callback(bind(on_server_data, placeholders::_1, sctx)); + stream.stream_closed_callback(bind(on_stream_close, placeholders::_1, sctx)); + } + + + template + static void build_verdict(T packet, uint8_t *payload, uint16_t plen, nlmsghdr *nlh_verdict, nfqnl_msg_packet_hdr *ph, stream_ctx* sctx, bool is_input){ + Tins::TCP* tcp = packet.template find_pdu(); + + if (tcp){ + Tins::PDU* application_layer = tcp->inner_pdu(); + u_int16_t payload_size = 0; + if (application_layer != nullptr){ + payload_size = application_layer->size(); + } + packet_info pktinfo{ + packet: string(payload, payload+plen), + payload: string(payload+plen - payload_size, payload+plen), + sid: stream_id::make_identifier(packet), + is_input: is_input, + is_tcp: true, + sctx: sctx, + }; + sctx->tcp_match_util.matching_has_been_called = false; + sctx->tcp_match_util.pkt_info = &pktinfo; + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.build_verdict] TCP Packet received " << packet.src_addr() << ":" << tcp->sport() << " -> " << packet.dst_addr() << ":" << tcp->dport() << " thr: " << this_thread::get_id() << ", sending to libtins StreamFollower" << endl; + #endif + 
sctx->follower.process_packet(packet); + #ifdef DEBUG + if (sctx->tcp_match_util.matching_has_been_called){ + cerr << "[DEBUG] [NetfilterQueue.build_verdict] StreamFollower has called matching functions" << endl; + }else{ + cerr << "[DEBUG] [NetfilterQueue.build_verdict] StreamFollower has NOT called matching functions" << endl; + } + #endif + if (sctx->tcp_match_util.matching_has_been_called && !sctx->tcp_match_util.result){ + Tins::PDU* data_layer = tcp->release_inner_pdu(); + if (data_layer != nullptr){ + delete data_layer; + } + tcp->set_flag(Tins::TCP::FIN,1); + tcp->set_flag(Tins::TCP::ACK,1); + tcp->set_flag(Tins::TCP::SYN,0); + nfq_nlmsg_verdict_put_pkt(nlh_verdict, packet.serialize().data(), packet.size()); + } + nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); + }else{ + Tins::UDP* udp = packet.template find_pdu(); + if (!udp){ + throw invalid_argument("Only TCP and UDP are supported"); + } + Tins::PDU* application_layer = udp->inner_pdu(); + u_int16_t payload_size = 0; + if (application_layer != nullptr){ + payload_size = application_layer->size(); + } + if((udp->inner_pdu() == nullptr)){ + nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); + } + packet_info pktinfo{ + packet: string(payload, payload+plen), + payload: string(payload+plen - payload_size, payload+plen), + sid: stream_id::make_identifier(packet), + is_input: is_input, + is_tcp: false, + sctx: sctx, + }; + if (filter_action(pktinfo)){ + nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_ACCEPT ); + }else{ + nfq_nlmsg_verdict_put(nlh_verdict, ntohl(ph->packet_id), NF_DROP ); + } + } + } + + static int queue_cb(const nlmsghdr *nlh, const mnl_socket* nl, void *data_ptr) { + + stream_ctx* sctx = (stream_ctx*)data_ptr; + + //Extract attributes from the nlmsghdr + nlattr *attr[NFQA_MAX+1] = {}; + + if (nfq_nlmsg_parse(nlh, attr) < 0) { + perror("problems parsing"); + return MNL_CB_ERROR; + } + if (attr[NFQA_PACKET_HDR] == nullptr) { + 
fputs("metaheader not set\n", stderr); + return MNL_CB_ERROR; + } + if (attr[NFQA_MARK] == nullptr) { + fputs("mark not set\n", stderr); + return MNL_CB_ERROR; + } + //Get Payload + uint16_t plen = mnl_attr_get_payload_len(attr[NFQA_PAYLOAD]); + uint8_t *payload = (uint8_t *)mnl_attr_get_payload(attr[NFQA_PAYLOAD]); + + //Return result to the kernel + struct nfqnl_msg_packet_hdr *ph = (nfqnl_msg_packet_hdr*) mnl_attr_get_payload(attr[NFQA_PACKET_HDR]); + struct nfgenmsg *nfg = (nfgenmsg *)mnl_nlmsg_get_payload(nlh); + char buf[MNL_SOCKET_BUFFER_SIZE]; + struct nlmsghdr *nlh_verdict; + + nlh_verdict = nfq_nlmsg_put(buf, NFQNL_MSG_VERDICT, ntohs(nfg->res_id)); + + bool is_input = ntohl(mnl_attr_get_u32(attr[NFQA_MARK])) & 0x1; // == 0x1337 that is odd + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.queue_cb] Packet received" << endl; + cerr << "[DEBUG] [NetfilterQueue.queue_cb] Packet ID: " << ntohl(ph->packet_id) << endl; + cerr << "[DEBUG] [NetfilterQueue.queue_cb] Payload size: " << plen << endl; + cerr << "[DEBUG] [NetfilterQueue.queue_cb] Is input: " << is_input << endl; + #endif + + // Check IP protocol version + if ( (payload[0] & 0xf0) == 0x40 ){ + build_verdict(Tins::IP(payload, plen), payload, plen, nlh_verdict, ph, sctx, is_input); + }else{ + build_verdict(Tins::IPv6(payload, plen), payload, plen, nlh_verdict, ph, sctx, is_input); + } + + if (mnl_socket_sendto(nl, nlh_verdict, nlh_verdict->nlmsg_len) < 0) { + throw runtime_error( "mnl_socket_send" ); + } + + return MNL_CB_OK; + } + + RegexQueue(int queue) : NfQueueExecutor(queue, &queue_cb) {} + + ~RegexQueue() { + sctx.clean(); + } + +}; + +#endif // REGEX_FILTER_CLASS_CPP \ No newline at end of file diff --git a/backend/binsrc/regex/stream_ctx.cpp b/backend/binsrc/regex/stream_ctx.cpp new file mode 100644 index 0000000..36df1fb --- /dev/null +++ b/backend/binsrc/regex/stream_ctx.cpp @@ -0,0 +1,143 @@ + +#ifndef STREAM_CTX_CPP +#define STREAM_CTX_CPP + +#include +#include +#include +#include + +using 
Tins::TCPIP::Stream; +using Tins::TCPIP::StreamFollower; +using namespace std; + +typedef Tins::TCPIP::StreamIdentifier stream_id; +typedef map matching_map; + +/* Considering to use unorder_map using this hash of stream_id + +namespace std { + template<> + struct hash { + size_t operator()(const stream_id& sid) const + { + return std::hash()(sid.max_address[0] + sid.max_address[1] + sid.max_address[2] + sid.max_address[3] + sid.max_address_port + sid.min_address[0] + sid.min_address[1] + sid.min_address[2] + sid.min_address[3] + sid.min_address_port); + } + }; +} + +*/ + +#ifdef DEBUG +ostream& operator<<(ostream& os, const Tins::TCPIP::StreamIdentifier::address_type &sid){ + bool first_print = false; + for (auto ele: sid){ + if (first_print || ele){ + first_print = true; + os << (int)ele << "."; + } + } + return os; +} + +ostream& operator<<(ostream& os, const stream_id &sid){ + os << sid.max_address << ":" << sid.max_address_port << " -> " << sid.min_address << ":" << sid.min_address_port; + return os; +} +#endif + + +struct packet_info; + +struct tcp_stream_tmp { + bool matching_has_been_called = false; + bool result; + packet_info *pkt_info; +}; + +struct stream_ctx { + matching_map in_hs_streams; + matching_map out_hs_streams; + hs_scratch_t* in_scratch = nullptr; + hs_scratch_t* out_scratch = nullptr; + u_int16_t latest_config_ver = 0; + StreamFollower follower; + tcp_stream_tmp tcp_match_util; + + void clean_scratches(){ + if (out_scratch != nullptr){ + hs_free_scratch(out_scratch); + out_scratch = nullptr; + } + if (in_scratch != nullptr){ + hs_free_scratch(in_scratch); + in_scratch = nullptr; + } + } + + void clean_stream_by_id(stream_id sid){ + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.clean_stream_by_id] Cleaning stream context of " << sid << endl; + #endif + auto stream_search = in_hs_streams.find(sid); + hs_stream_t* stream_match; + if (stream_search != in_hs_streams.end()){ + stream_match = stream_search->second; + if 
(hs_close_stream(stream_match, in_scratch, nullptr, nullptr) != HS_SUCCESS) { + cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; + throw invalid_argument("Cannot close stream match on hyperscan"); + } + in_hs_streams.erase(stream_search); + } + + stream_search = out_hs_streams.find(sid); + if (stream_search != out_hs_streams.end()){ + stream_match = stream_search->second; + if (hs_close_stream(stream_match, out_scratch, nullptr, nullptr) != HS_SUCCESS) { + cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; + throw invalid_argument("Cannot close stream match on hyperscan"); + } + out_hs_streams.erase(stream_search); + } + } + + void clean(){ + + #ifdef DEBUG + cerr << "[DEBUG] [NetfilterQueue.clean] Cleaning stream context" << endl; + #endif + + if (in_scratch){ + for(auto ele: in_hs_streams){ + if (hs_close_stream(ele.second, in_scratch, nullptr, nullptr) != HS_SUCCESS) { + cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; + throw invalid_argument("Cannot close stream match on hyperscan"); + } + } + in_hs_streams.clear(); + } + + if (out_scratch){ + for(auto ele: out_hs_streams){ + if (hs_close_stream(ele.second, out_scratch, nullptr, nullptr) != HS_SUCCESS) { + cerr << "[error] [NetfilterQueue.clean_stream_by_id] Error closing the stream matcher (hs)" << endl; + throw invalid_argument("Cannot close stream match on hyperscan"); + } + } + out_hs_streams.clear(); + } + clean_scratches(); + } +}; + +struct packet_info { + string packet; + string payload; + stream_id sid; + bool is_input; + bool is_tcp; + stream_ctx* sctx; +}; + + +#endif // STREAM_CTX_CPP \ No newline at end of file diff --git a/backend/docker-entrypoint.sh b/backend/docker-entrypoint.sh index b8e84cd..b329d1d 100644 --- a/backend/docker-entrypoint.sh +++ b/backend/docker-entrypoint.sh @@ -4,5 +4,3 @@ chown nobody -R /execute/ exec capsh 
--caps="cap_net_admin+eip cap_setpcap,cap_setuid,cap_setgid+ep" \ --keep=1 --user=nobody --addamb=cap_net_admin -- -c "python3 /execute/app.py DOCKER" - - diff --git a/backend/modules/nfproxy/__init__.py b/backend/modules/nfproxy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/modules/nfproxy/firegex.py b/backend/modules/nfproxy/firegex.py new file mode 100644 index 0000000..71afcf8 --- /dev/null +++ b/backend/modules/nfproxy/firegex.py @@ -0,0 +1,171 @@ +from modules.nfregex.nftables import FiregexTables +from utils import run_func +from modules.nfregex.models import Service, Regex +import re +import os +import asyncio +import traceback +from utils import DEBUG +from fastapi import HTTPException + +nft = FiregexTables() + +class RegexFilter: + def __init__( + self, regex, + is_case_sensitive=True, + input_mode=False, + output_mode=False, + blocked_packets=0, + id=None, + update_func = None + ): + self.regex = regex + self.is_case_sensitive = is_case_sensitive + if input_mode == output_mode: + input_mode = output_mode = True # (False, False) == (True, True) + self.input_mode = input_mode + self.output_mode = output_mode + self.blocked = blocked_packets + self.id = id + self.update_func = update_func + self.compiled_regex = self.compile() + + @classmethod + def from_regex(cls, regex:Regex, update_func = None): + return cls( + id=regex.id, regex=regex.regex, is_case_sensitive=regex.is_case_sensitive, + blocked_packets=regex.blocked_packets, + input_mode = regex.mode in ["C","B"], output_mode=regex.mode in ["S","B"], + update_func = update_func + ) + def compile(self): + if isinstance(self.regex, str): + self.regex = self.regex.encode() + if not isinstance(self.regex, bytes): + raise Exception("Invalid Regex Paramether") + re.compile(self.regex) # raise re.error if it's invalid! 
+ case_sensitive = "1" if self.is_case_sensitive else "0" + if self.input_mode: + yield case_sensitive + "C" + self.regex.hex() + if self.output_mode: + yield case_sensitive + "S" + self.regex.hex() + + async def update(self): + if self.update_func: + await run_func(self.update_func, self) + +class FiregexInterceptor: + + def __init__(self): + self.srv:Service + self.filter_map_lock:asyncio.Lock + self.filter_map: dict[str, RegexFilter] + self.regex_filters: set[RegexFilter] + self.update_config_lock:asyncio.Lock + self.process:asyncio.subprocess.Process + self.update_task: asyncio.Task + self.ack_arrived = False + self.ack_status = None + self.ack_fail_what = "" + self.ack_lock = asyncio.Lock() + + @classmethod + async def start(cls, srv: Service): + self = cls() + self.srv = srv + self.filter_map_lock = asyncio.Lock() + self.update_config_lock = asyncio.Lock() + queue_range = await self._start_binary() + self.update_task = asyncio.create_task(self.update_blocked()) + nft.add(self.srv, queue_range) + if not self.ack_lock.locked(): + await self.ack_lock.acquire() + return self + + async def _start_binary(self): + proxy_binary_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),"../cppqueue") + self.process = await asyncio.create_subprocess_exec( + proxy_binary_path, + stdout=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE, + env={"MATCH_MODE": "stream" if self.srv.proto == "tcp" else "block", "NTHREADS": os.getenv("NTHREADS","1")}, + ) + line_fut = self.process.stdout.readuntil() + try: + line_fut = await asyncio.wait_for(line_fut, timeout=3) + except asyncio.TimeoutError: + self.process.kill() + raise Exception("Invalid binary output") + line = line_fut.decode() + if line.startswith("QUEUES "): + params = line.split() + return (int(params[1]), int(params[2])) + else: + self.process.kill() + raise Exception("Invalid binary output") + + async def update_blocked(self): + try: + while True: + line = (await self.process.stdout.readuntil()).decode() + 
if DEBUG: + print(line) + if line.startswith("BLOCKED "): + regex_id = line.split()[1] + async with self.filter_map_lock: + if regex_id in self.filter_map: + self.filter_map[regex_id].blocked+=1 + await self.filter_map[regex_id].update() + if line.startswith("ACK "): + self.ack_arrived = True + self.ack_status = line.split()[1].upper() == "OK" + if not self.ack_status: + self.ack_fail_what = " ".join(line.split()[2:]) + self.ack_lock.release() + except asyncio.CancelledError: + pass + except asyncio.IncompleteReadError: + pass + except Exception: + traceback.print_exc() + + async def stop(self): + self.update_task.cancel() + if self.process and self.process.returncode is None: + self.process.kill() + + async def _update_config(self, filters_codes): + async with self.update_config_lock: + self.process.stdin.write((" ".join(filters_codes)+"\n").encode()) + await self.process.stdin.drain() + try: + async with asyncio.timeout(3): + await self.ack_lock.acquire() + except TimeoutError: + pass + if not self.ack_arrived or not self.ack_status: + raise HTTPException(status_code=500, detail=f"NFQ error: {self.ack_fail_what}") + + + async def reload(self, filters:list[RegexFilter]): + async with self.filter_map_lock: + self.filter_map = self.compile_filters(filters) + filters_codes = self.get_filter_codes() + await self._update_config(filters_codes) + + def get_filter_codes(self): + filters_codes = list(self.filter_map.keys()) + filters_codes.sort(key=lambda a: self.filter_map[a].blocked, reverse=True) + return filters_codes + + def compile_filters(self, filters:list[RegexFilter]): + res = {} + for filter_obj in filters: + try: + raw_filters = filter_obj.compile() + for filter in raw_filters: + res[filter] = filter_obj + except Exception: + pass + return res + diff --git a/backend/modules/nfproxy/firewall.py b/backend/modules/nfproxy/firewall.py new file mode 100644 index 0000000..d0d5479 --- /dev/null +++ b/backend/modules/nfproxy/firewall.py @@ -0,0 +1,119 @@ +import 
asyncio +from modules.nfregex.firegex import FiregexInterceptor, RegexFilter +from modules.nfregex.nftables import FiregexTables, FiregexFilter +from modules.nfregex.models import Regex, Service +from utils.sqlite import SQLite + +class STATUS: + STOP = "stop" + ACTIVE = "active" + +nft = FiregexTables() + + +class ServiceManager: + def __init__(self, srv: Service, db): + self.srv = srv + self.db = db + self.status = STATUS.STOP + self.filters: dict[int, FiregexFilter] = {} + self.lock = asyncio.Lock() + self.interceptor = None + + async def _update_filters_from_db(self): + regexes = [ + Regex.from_dict(ele) for ele in + self.db.query("SELECT * FROM regexes WHERE service_id = ? AND active=1;", self.srv.id) + ] + #Filter check + old_filters = set(self.filters.keys()) + new_filters = set([f.id for f in regexes]) + #remove old filters + for f in old_filters: + if f not in new_filters: + del self.filters[f] + #add new filters + for f in new_filters: + if f not in old_filters: + filter = [ele for ele in regexes if ele.id == f][0] + self.filters[f] = RegexFilter.from_regex(filter, self._stats_updater) + if self.interceptor: + await self.interceptor.reload(self.filters.values()) + + def __update_status_db(self, status): + self.db.query("UPDATE services SET status = ? WHERE service_id = ?;", status, self.srv.id) + + async def next(self,to): + async with self.lock: + if (self.status, to) == (STATUS.ACTIVE, STATUS.STOP): + await self.stop() + self._set_status(to) + # STOP -> ACTIVE + elif (self.status, to) == (STATUS.STOP, STATUS.ACTIVE): + await self.restart() + + def _stats_updater(self,filter:RegexFilter): + self.db.query("UPDATE regexes SET blocked_packets = ? 
WHERE regex_id = ?;", filter.blocked, filter.id) + + def _set_status(self,status): + self.status = status + self.__update_status_db(status) + + async def start(self): + if not self.interceptor: + nft.delete(self.srv) + self.interceptor = await FiregexInterceptor.start(self.srv) + await self._update_filters_from_db() + self._set_status(STATUS.ACTIVE) + + async def stop(self): + nft.delete(self.srv) + if self.interceptor: + await self.interceptor.stop() + self.interceptor = None + + async def restart(self): + await self.stop() + await self.start() + + async def update_filters(self): + async with self.lock: + await self._update_filters_from_db() + +class FirewallManager: + def __init__(self, db:SQLite): + self.db = db + self.service_table: dict[str, ServiceManager] = {} + self.lock = asyncio.Lock() + + async def close(self): + for key in list(self.service_table.keys()): + await self.remove(key) + + async def remove(self,srv_id): + async with self.lock: + if srv_id in self.service_table: + await self.service_table[srv_id].next(STATUS.STOP) + del self.service_table[srv_id] + + async def init(self): + nft.init() + await self.reload() + + async def reload(self): + async with self.lock: + for srv in self.db.query('SELECT * FROM services;'): + srv = Service.from_dict(srv) + if srv.id in self.service_table: + continue + self.service_table[srv.id] = ServiceManager(srv, self.db) + await self.service_table[srv.id].next(srv.status) + + def get(self,srv_id) -> ServiceManager: + if srv_id in self.service_table: + return self.service_table[srv_id] + else: + raise ServiceNotFoundException() + +class ServiceNotFoundException(Exception): + pass diff --git a/backend/modules/nfproxy/models.py b/backend/modules/nfproxy/models.py new file mode 100644 index 0000000..0c36890 --- /dev/null +++ b/backend/modules/nfproxy/models.py @@ -0,0 +1,30 @@ +import base64 + +class Service: + def __init__(self, service_id: str, status: str, port: int, name: str, proto: str, ip_int: str, **other): + 
self.id = service_id + self.status = status + self.port = port + self.name = name + self.proto = proto + self.ip_int = ip_int + + @classmethod + def from_dict(cls, var: dict): + return cls(**var) + + +class Regex: + def __init__(self, regex_id: int, regex: bytes, mode: str, service_id: str, blocked_packets: int, is_case_sensitive: bool, active: bool, **other): + self.regex = regex + self.mode = mode + self.service_id = service_id + self.blocked_packets = blocked_packets + self.id = regex_id + self.is_case_sensitive = is_case_sensitive + self.active = active + + @classmethod + def from_dict(cls, var: dict): + var['regex'] = base64.b64decode(var['regex']) + return cls(**var) \ No newline at end of file diff --git a/backend/modules/nfproxy/nftables.py b/backend/modules/nfproxy/nftables.py new file mode 100644 index 0000000..54c170a --- /dev/null +++ b/backend/modules/nfproxy/nftables.py @@ -0,0 +1,105 @@ +from modules.nfregex.models import Service +from utils import ip_parse, ip_family, NFTableManager, nftables_int_to_json + +class FiregexFilter: + def __init__(self, proto:str, port:int, ip_int:str, target:str, id:int): + self.id = id + self.target = target + self.proto = proto + self.port = int(port) + self.ip_int = str(ip_int) + + def __eq__(self, o: object) -> bool: + if isinstance(o, FiregexFilter) or isinstance(o, Service): + return self.port == o.port and self.proto == o.proto and ip_parse(self.ip_int) == ip_parse(o.ip_int) + return False + +class FiregexTables(NFTableManager): + input_chain = "nfproxy_input" + output_chain = "nfproxy_output" + + def __init__(self): + super().__init__([ + {"add":{"chain":{ + "family":"inet", + "table":self.table_name, + "name":self.input_chain, + "type":"filter", + "hook":"prerouting", + "prio":-150, + "policy":"accept" + }}}, + {"add":{"chain":{ + "family":"inet", + "table":self.table_name, + "name":self.output_chain, + "type":"filter", + "hook":"postrouting", + "prio":-150, + "policy":"accept" + }}} + ],[ + 
{"flush":{"chain":{"table":self.table_name,"family":"inet", "name":self.input_chain}}}, + {"delete":{"chain":{"table":self.table_name,"family":"inet", "name":self.input_chain}}}, + {"flush":{"chain":{"table":self.table_name,"family":"inet", "name":self.output_chain}}}, + {"delete":{"chain":{"table":self.table_name,"family":"inet", "name":self.output_chain}}}, + ]) + + def add(self, srv:Service, queue_range): + + for ele in self.get(): + if ele.__eq__(srv): return + + init, end = queue_range + if init > end: init, end = end, init + self.cmd( + { "insert":{ "rule": { + "family": "inet", + "table": self.table_name, + "chain": self.output_chain, + "expr": [ + {'match': {'left': {'payload': {'protocol': ip_family(srv.ip_int), 'field': 'saddr'}}, 'op': '==', 'right': nftables_int_to_json(srv.ip_int)}}, + {'match': {"left": { "payload": {"protocol": str(srv.proto), "field": "sport"}}, "op": "==", "right": int(srv.port)}}, + {"queue": {"num": str(init) if init == end else {"range":[init, end] }, "flags": ["bypass"]}} + ] + }}}, + {"insert":{"rule":{ + "family": "inet", + "table": self.table_name, + "chain": self.input_chain, + "expr": [ + {'match': {'left': {'payload': {'protocol': ip_family(srv.ip_int), 'field': 'daddr'}}, 'op': '==', 'right': nftables_int_to_json(srv.ip_int)}}, + {'match': {"left": { "payload": {"protocol": str(srv.proto), "field": "dport"}}, "op": "==", "right": int(srv.port)}}, + {"queue": {"num": str(init) if init == end else {"range":[init, end] }, "flags": ["bypass"]}} + ] + }}} + ) + + + def get(self) -> list[FiregexFilter]: + res = [] + for filter in self.list_rules(tables=[self.table_name], chains=[self.input_chain,self.output_chain]): + ip_int = None + if isinstance(filter["expr"][0]["match"]["right"],str): + ip_int = str(ip_parse(filter["expr"][0]["match"]["right"])) + else: + ip_int = f'{filter["expr"][0]["match"]["right"]["prefix"]["addr"]}/{filter["expr"][0]["match"]["right"]["prefix"]["len"]}' + res.append(FiregexFilter( + 
target=filter["chain"], + id=int(filter["handle"]), + proto=filter["expr"][1]["match"]["left"]["payload"]["protocol"], + port=filter["expr"][1]["match"]["right"], + ip_int=ip_int + )) + return res + + def delete(self, srv:Service): + for filter in self.get(): + if filter.__eq__(srv): + self.cmd({ "delete":{ "rule": { + "family": "inet", + "table": self.table_name, + "chain": filter.target, + "handle": filter.id + }}}) + \ No newline at end of file diff --git a/backend/utils/__init__.py b/backend/utils/__init__.py index 52e753d..44bf193 100644 --- a/backend/utils/__init__.py +++ b/backend/utils/__init__.py @@ -19,7 +19,7 @@ ON_DOCKER = "DOCKER" in sys.argv DEBUG = "DEBUG" in sys.argv FIREGEX_PORT = int(os.getenv("PORT","4444")) JWT_ALGORITHM: str = "HS256" -API_VERSION = "3.0.0" +API_VERSION = "{{VERSION_PLACEHOLDER}}" if "{" not in "{{VERSION_PLACEHOLDER}}" else "0.0.0" PortType = Annotated[int, Path(gt=0, lt=65536)] diff --git a/proxy-client/MANIFEST.in b/proxy-client/MANIFEST.in new file mode 100644 index 0000000..540b720 --- /dev/null +++ b/proxy-client/MANIFEST.in @@ -0,0 +1 @@ +include requirements.txt \ No newline at end of file diff --git a/proxy-client/README.md b/proxy-client/README.md new file mode 100644 index 0000000..d441b8b --- /dev/null +++ b/proxy-client/README.md @@ -0,0 +1,3 @@ +# Firegex Python Library and CLI + +It's a work in progress! \ No newline at end of file diff --git a/proxy-client/fgex b/proxy-client/fgex new file mode 100755 index 0000000..adcf48a --- /dev/null +++ b/proxy-client/fgex @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +# TODO implement cli start function +from firegex.cli import run + +if __name__ == "__main__": + run() diff --git a/proxy-client/fgex-pip/README.md b/proxy-client/fgex-pip/README.md new file mode 100644 index 0000000..c31e729 --- /dev/null +++ b/proxy-client/fgex-pip/README.md @@ -0,0 +1,5 @@ +# Firegex python library + +Alias of 'firegex' library + +It's a work in progress! 
\ No newline at end of file diff --git a/proxy-client/fgex-pip/fgex/__init__.py b/proxy-client/fgex-pip/fgex/__init__.py new file mode 100644 index 0000000..1bf13c9 --- /dev/null +++ b/proxy-client/fgex-pip/fgex/__init__.py @@ -0,0 +1 @@ +from firegex import * \ No newline at end of file diff --git a/proxy-client/fgex-pip/fgex/__main__.py b/proxy-client/fgex-pip/fgex/__main__.py new file mode 100644 index 0000000..810291c --- /dev/null +++ b/proxy-client/fgex-pip/fgex/__main__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +from firegex.cli import run + +if __name__ == "__main__": + run() diff --git a/proxy-client/fgex-pip/setup.py b/proxy-client/fgex-pip/setup.py new file mode 100644 index 0000000..b4bf8ce --- /dev/null +++ b/proxy-client/fgex-pip/setup.py @@ -0,0 +1,25 @@ +import setuptools + +with open("README.md", "r", encoding="utf-8") as fh: + long_description = fh.read() + +setuptools.setup( + name="fgex", + version="0.0.0", + author="Pwnzer0tt1", + author_email="pwnzer0tt1@poliba.it", + py_modules=["fgex"], + install_requires=["fgex"], + include_package_data=True, + description="Firegex client", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/pwnzer0tt1/firegex", + packages=setuptools.find_packages(), + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Operating System :: OS Independent", + ], + python_requires='>=3.10', +) diff --git a/proxy-client/firegex/__init__.py b/proxy-client/firegex/__init__.py new file mode 100644 index 0000000..5f63222 --- /dev/null +++ b/proxy-client/firegex/__init__.py @@ -0,0 +1,7 @@ + +__version__ = "{{VERSION_PLACEHOLDER}}" if "{" not in "{{VERSION_PLACEHOLDER}}" else "0.0.0" + +#Exported functions +__all__ = [ + +] \ No newline at end of file diff --git a/proxy-client/firegex/__main__.py b/proxy-client/firegex/__main__.py new file mode 100644 index 0000000..56d0693 --- 
/dev/null +++ b/proxy-client/firegex/__main__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +# TODO implement cli start function +from firegexproxy.cli import run + +if __name__ == "__main__": + run() diff --git a/proxy-client/requirements.txt b/proxy-client/requirements.txt new file mode 100644 index 0000000..593817c --- /dev/null +++ b/proxy-client/requirements.txt @@ -0,0 +1,14 @@ +typer==0.12.3 +requests>=2.32.3 +python-dateutil==2.9.0.post0 +pydantic >= 2 +typing-extensions >= 4.7.1 +textual==0.89.1 +toml==0.10.2 +psutil==6.0.0 +dirhash==0.5.0 +requests-toolbelt==1.0.0 +python-socketio[client]==5.11.4 +orjson + +# TODO choose dependencies \ No newline at end of file diff --git a/proxy-client/setup.py b/proxy-client/setup.py new file mode 100644 index 0000000..feafc5d --- /dev/null +++ b/proxy-client/setup.py @@ -0,0 +1,31 @@ +import setuptools + +with open("README.md", "r", encoding="utf-8") as fh: + long_description = fh.read() + +with open('requirements.txt', 'r', encoding='utf-8') as f: + required = [ele.strip() for ele in f.read().splitlines() if not ele.strip().startswith("#") and ele.strip() != ""] + +VERSION = "{{VERSION_PLACEHOLDER}}" + +setuptools.setup( + name="firegex", + version= VERSION if "{" not in VERSION else "0.0.0", #uv pip install -U . 
--no-cache-dir for testing + author="Pwnzer0tt1", + author_email="pwnzer0tt1@poliba.it", + scripts=["fgex"], + py_modules=["fgex"], + install_requires=required, + include_package_data=True, + description="Firegex client", + long_description=long_description, + long_description_content_type="text/markdown", + url="https://github.com/pwnzer0tt1/firegex", + packages=setuptools.find_packages(), + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Operating System :: OS Independent", + ], + python_requires='>=3.10', +) diff --git a/start.py b/start.py index 662bc67..7ceb55a 100755 --- a/start.py +++ b/start.py @@ -1,6 +1,12 @@ #!/usr/bin/env python3 from __future__ import annotations -import argparse, sys, platform, os, multiprocessing, subprocess, getpass +import argparse +import sys +import platform +import os +import multiprocessing +import subprocess +import getpass pref = "\033[" reset = f"{pref}0m" @@ -36,7 +42,7 @@ def dict_to_yaml(data, indent_spaces:int=4, base_indent:int=0, additional_spaces spaces = ' '*((indent_spaces*base_indent)+additional_spaces) if isinstance(data, dict): for key, value in data.items(): - if not add_text_on_dict is None: + if add_text_on_dict is not None: spaces_len = len(spaces)-len(add_text_on_dict) spaces = (' '*max(spaces_len, 0))+add_text_on_dict add_text_on_dict = None @@ -76,7 +82,7 @@ def composecmd(cmd, composefile=None): puts("Docker compose not found! 
please install docker compose!", color=colors.red) def check_already_running(): - return "firegex" in cmd_check(f'docker ps --filter "name=^firegex$"', get_output=True) + return "firegex" in cmd_check('docker ps --filter "name=^firegex$"', get_output=True) def gen_args(args_to_parse: list[str]|None = None): @@ -106,13 +112,13 @@ def gen_args(args_to_parse: list[str]|None = None): parser_restart.add_argument('--logs', required=False, action="store_true", help='Show firegex logs', default=False) args = parser.parse_args(args=args_to_parse) - if not "clear" in args: + if "clear" not in args: args.clear = False - if not "threads" in args or args.threads < 1: + if "threads" not in args or args.threads < 1: args.threads = multiprocessing.cpu_count() - if not "port" in args or args.port < 1: + if "port" not in args or args.port < 1: args.port = 4444 if args.command is None: @@ -126,7 +132,7 @@ def gen_args(args_to_parse: list[str]|None = None): args = gen_args() def is_linux(): - return "linux" in sys.platform and not 'microsoft-standard' in platform.uname().release + return "linux" in sys.platform and 'microsoft-standard' not in platform.uname().release def write_compose(skip_password = True): psw_set = get_password() if not skip_password else None @@ -229,10 +235,13 @@ def get_password(): def volume_exists(): - return "firegex_firegex_data" in cmd_check(f'docker volume ls --filter "name=^firegex_firegex_data$"', get_output=True) + return "firegex_firegex_data" in cmd_check('docker volume ls --filter "name=^firegex_firegex_data$"', get_output=True) def nfqueue_exists(): - import socket, fcntl, os, time + import socket + import fcntl + import os + import time NETLINK_NETFILTER = 12 SOL_NETLINK = 270 @@ -241,7 +250,7 @@ def nfqueue_exists(): nfsock = socket.socket(socket.AF_NETLINK, socket.SOCK_RAW, NETLINK_NETFILTER) fcntl.fcntl(nfsock, fcntl.F_SETFL, os.O_RDONLY|os.O_NONBLOCK) nfsock.setsockopt(SOL_NETLINK, NETLINK_EXT_ACK, 1) - except Exception as e: + except Exception: 
return False for rev in [3,2,1,0]: @@ -252,10 +261,13 @@ def nfqueue_exists(): nfsock.send(payload) data = nfsock.recv(1024) is_error = data[4] == 2 - if not is_error: return True # The module exists and we have permission to use it + if not is_error: + return True # The module exists and we have permission to use it error_code = int.from_bytes(data[16:16+4], signed=True, byteorder='little') - if error_code == -1: return True # EPERM (the user is not root, but the module exists) - if error_code == -2: pass # ENOENT (the module does not exist) + if error_code == -1: + return True # EPERM (the user is not root, but the module exists) + if error_code == -2: + pass # ENOENT (the module does not exist) else: puts("Error while trying to check if the nfqueue module is loaded, this check will be skipped!", color=colors.yellow) return True @@ -294,7 +306,7 @@ def main(): if check_already_running(): puts("Firegex is already running! use --help to see options useful to manage firegex execution", color=colors.yellow) else: - puts(f"Firegex", color=colors.yellow, end="") + puts("Firegex", color=colors.yellow, end="") puts(" will start on port ", end="") puts(f"{args.port}", color=colors.cyan) write_compose(skip_password=False) From f3ba6dc716513b0ba521d594ff32ff3e399474b2 Mon Sep 17 00:00:00 2001 From: Domingo Dirutigliano Date: Tue, 11 Feb 2025 19:11:30 +0100 Subject: [PATCH 02/11] more RESTful APIs --- Dockerfile | 11 +- backend/binsrc/classes/netfilter.cpp | 6 - backend/binsrc/proxytun/proxytun.cpp | 4 - backend/modules/firewall/firewall.py | 5 +- backend/modules/nfproxy/firegex.py | 2 + backend/modules/nfproxy/firewall.py | 2 + backend/modules/nfproxy/models.py | 19 +- backend/routers/firewall.py | 10 +- backend/routers/nfproxy.py | 260 ++++++++++++++++++ backend/routers/nfregex.py | 27 +- backend/routers/porthijack.py | 14 +- frontend/src/components/Firewall/utils.ts | 10 +- frontend/src/components/NFRegex/utils.ts | 24 +- .../PortHijack/ServiceRow/index.tsx | 34 +-- 
frontend/src/components/PortHijack/utils.ts | 16 +- frontend/src/js/utils.tsx | 45 ++- start.py | 1 + tests/utils/firegexapi.py | 51 ++-- 18 files changed, 378 insertions(+), 163 deletions(-) create mode 100644 backend/routers/nfproxy.py diff --git a/Dockerfile b/Dockerfile index 58a7283..7eea0ef 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,11 +14,10 @@ RUN bun run build #Building main conteiner -FROM --platform=$TARGETARCH debian:trixie-slim AS base -RUN apt-get update -qq && apt-get upgrade -qq && \ - apt-get install -qq python3-pip build-essential \ - libnetfilter-queue-dev libnfnetlink-dev libmnl-dev libcap2-bin\ - nftables libvectorscan-dev libtins-dev python3-nftables +FROM --platform=$TARGETARCH registry.fedoraproject.org/fedora:latest +RUN dnf -y update && dnf install -y python3-pip @development-tools gcc-c++ \ + libnetfilter_queue-devel libnfnetlink-devel libmnl-devel libcap-ng-utils \ + nftables vectorscan-devel libtins-devel python3-nftables libpcap-devel boost-devel RUN mkdir -p /execute/modules WORKDIR /execute @@ -28,7 +27,7 @@ RUN pip3 install --no-cache-dir --break-system-packages -r /execute/requirements COPY ./backend/binsrc /execute/binsrc RUN g++ binsrc/nfqueue.cpp -o modules/cppqueue -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libhs libmnl) -RUN g++ binsrc/nfproxy-tun.cpp -o modules/cppnfproxy -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libmnl) +#RUN g++ binsrc/nfproxy-tun.cpp -o modules/cppnfproxy -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libmnl) COPY ./backend/ /execute/ COPY --from=frontend /app/dist/ ./frontend/ diff --git a/backend/binsrc/classes/netfilter.cpp b/backend/binsrc/classes/netfilter.cpp index fed457c..5ac7c1b 100644 --- a/backend/binsrc/classes/netfilter.cpp +++ b/backend/binsrc/classes/netfilter.cpp @@ -1,20 +1,14 @@ #include #include #include -#include -#include -#include #include 
#include #include #include #include #include -#include #include -using Tins::TCPIP::Stream; -using Tins::TCPIP::StreamFollower; using namespace std; #ifndef NETFILTER_CLASS_CPP diff --git a/backend/binsrc/proxytun/proxytun.cpp b/backend/binsrc/proxytun/proxytun.cpp index 22c88ac..dbce409 100644 --- a/backend/binsrc/proxytun/proxytun.cpp +++ b/backend/binsrc/proxytun/proxytun.cpp @@ -133,10 +133,6 @@ class SocketTunnelQueue: public NfQueueExecutor { SocketTunnelQueue(int queue) : NfQueueExecutor(queue, &queue_cb) {} - ~SocketTunnelQueue() { - // TODO - } - }; #endif // PROXY_TUNNEL_CPP \ No newline at end of file diff --git a/backend/modules/firewall/firewall.py b/backend/modules/firewall/firewall.py index b5bb292..171e34f 100644 --- a/backend/modules/firewall/firewall.py +++ b/backend/modules/firewall/firewall.py @@ -130,6 +130,7 @@ class FirewallManager: def allow_dhcp(self): return self.db.get("allow_dhcp", "1") == "1" - @drop_invalid.setter - def allow_dhcp_set(self, value): + @allow_dhcp.setter + def allow_dhcp(self, value): self.db.set("allow_dhcp", "1" if value else "0") + diff --git a/backend/modules/nfproxy/firegex.py b/backend/modules/nfproxy/firegex.py index 71afcf8..6f0e8d2 100644 --- a/backend/modules/nfproxy/firegex.py +++ b/backend/modules/nfproxy/firegex.py @@ -8,6 +8,8 @@ import traceback from utils import DEBUG from fastapi import HTTPException +#TODO copied file, review + nft = FiregexTables() class RegexFilter: diff --git a/backend/modules/nfproxy/firewall.py b/backend/modules/nfproxy/firewall.py index d0d5479..5ff1c39 100644 --- a/backend/modules/nfproxy/firewall.py +++ b/backend/modules/nfproxy/firewall.py @@ -4,6 +4,8 @@ from modules.nfregex.nftables import FiregexTables, FiregexFilter from modules.nfregex.models import Regex, Service from utils.sqlite import SQLite +#TODO copied file, review + class STATUS: STOP = "stop" ACTIVE = "active" diff --git a/backend/modules/nfproxy/models.py b/backend/modules/nfproxy/models.py index 0c36890..24d1087 
100644 --- a/backend/modules/nfproxy/models.py +++ b/backend/modules/nfproxy/models.py @@ -1,5 +1,3 @@ -import base64 - class Service: def __init__(self, service_id: str, status: str, port: int, name: str, proto: str, ip_int: str, **other): self.id = service_id @@ -14,17 +12,14 @@ class Service: return cls(**var) -class Regex: - def __init__(self, regex_id: int, regex: bytes, mode: str, service_id: str, blocked_packets: int, is_case_sensitive: bool, active: bool, **other): - self.regex = regex - self.mode = mode - self.service_id = service_id +class PyFilter: + def __init__(self, filter_id:int, name: str, blocked_packets: int, edited_packets: int, active: bool, **other): + self.filter_id = filter_id + self.name = name self.blocked_packets = blocked_packets - self.id = regex_id - self.is_case_sensitive = is_case_sensitive + self.edited_packets = edited_packets self.active = active - + @classmethod def from_dict(cls, var: dict): - var['regex'] = base64.b64decode(var['regex']) - return cls(**var) \ No newline at end of file + return cls(**var) diff --git a/backend/routers/firewall.py b/backend/routers/firewall.py index 8801db9..058d6a1 100644 --- a/backend/routers/firewall.py +++ b/backend/routers/firewall.py @@ -71,7 +71,7 @@ async def get_settings(): """Get the firewall settings""" return firewall.settings -@app.post("/settings/set", response_model=StatusMessageModel) +@app.put("/settings", response_model=StatusMessageModel) async def set_settings(form: FirewallSettings): """Set the firewall settings""" firewall.settings = form @@ -86,13 +86,13 @@ async def get_rule_list(): "enabled": firewall.enabled } -@app.get('/enable', response_model=StatusMessageModel) +@app.post('/enable', response_model=StatusMessageModel) async def enable_firewall(): """Request enabling the firewall""" firewall.enabled = True return await apply_changes() -@app.get('/disable', response_model=StatusMessageModel) +@app.post('/disable', response_model=StatusMessageModel) async def 
disable_firewall(): """Request disabling the firewall""" firewall.enabled = False @@ -128,9 +128,9 @@ def parse_and_check_rule(rule:RuleModel): return rule -@app.post('/rules/set', response_model=StatusMessageModel) +@app.post('/rules', response_model=StatusMessageModel) async def add_new_service(form: RuleFormAdd): - """Add a new service""" + """Edit rule table""" rules = [parse_and_check_rule(ele) for ele in form.rules] try: db.queries(["DELETE FROM rules"]+ diff --git a/backend/routers/nfproxy.py b/backend/routers/nfproxy.py new file mode 100644 index 0000000..8580404 --- /dev/null +++ b/backend/routers/nfproxy.py @@ -0,0 +1,260 @@ +import secrets +import sqlite3 +from fastapi import APIRouter, HTTPException +from pydantic import BaseModel +from modules.nfproxy.nftables import FiregexTables +from modules.nfproxy.firewall import STATUS, FirewallManager +from utils.sqlite import SQLite +from utils import ip_parse, refactor_name, socketio_emit, PortType +from utils.models import ResetRequest, StatusMessageModel + +# TODO copied file, review +class ServiceModel(BaseModel): + service_id: str + status: str + port: PortType + name: str + proto: str + ip_int: str + n_filters: int + edited_packets: int + blocked_packets: int + +class RenameForm(BaseModel): + name:str + +class PyFilterModel(BaseModel): + filter_id: int + name: str + blocked_packets: int + edited_packets: int + active: bool + +class ServiceAddForm(BaseModel): + name: str + port: PortType + proto: str + ip_int: str + +class ServiceAddResponse(BaseModel): + status:str + service_id: str|None = None + +app = APIRouter() + +db = SQLite('db/nft-pyfilters.db', { + 'services': { + 'service_id': 'VARCHAR(100) PRIMARY KEY', + 'status': 'VARCHAR(100) NOT NULL', + 'port': 'INT NOT NULL CHECK(port > 0 and port < 65536)', + 'name': 'VARCHAR(100) NOT NULL UNIQUE', + 'proto': 'VARCHAR(3) NOT NULL CHECK (proto IN ("tcp", "http"))', + 'ip_int': 'VARCHAR(100) NOT NULL', + }, + 'pyfilter': { + 'filter_id': 'INTEGER PRIMARY 
KEY', + 'name': 'VARCHAR(100) NOT NULL', + 'blocked_packets': 'INTEGER UNSIGNED NOT NULL DEFAULT 0', + 'edited_packets': 'INTEGER UNSIGNED NOT NULL DEFAULT 0', + 'service_id': 'VARCHAR(100) NOT NULL', + 'active' : 'BOOLEAN NOT NULL CHECK (active IN (0, 1)) DEFAULT 1', + 'FOREIGN KEY (service_id)':'REFERENCES services (service_id)', + }, + 'QUERY':[ + "CREATE UNIQUE INDEX IF NOT EXISTS unique_services ON services (port, ip_int, proto);", + "CREATE UNIQUE INDEX IF NOT EXISTS unique_pyfilter_service ON pyfilter (name, service_id);" + ] +}) + +async def refresh_frontend(additional:list[str]=[]): + await socketio_emit(["nfproxy"]+additional) + +async def reset(params: ResetRequest): + if not params.delete: + db.backup() + await firewall.close() + FiregexTables().reset() + if params.delete: + db.delete() + db.init() + else: + db.restore() + try: + await firewall.init() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + +async def startup(): + db.init() + try: + await firewall.init() + except Exception as e: + print("WARNING cannot start firewall:", e) + +async def shutdown(): + db.backup() + await firewall.close() + db.disconnect() + db.restore() + +def gen_service_id(): + while True: + res = secrets.token_hex(8) + if len(db.query('SELECT 1 FROM services WHERE service_id = ?;', res)) == 0: + break + return res + +firewall = FirewallManager(db) + +@app.get('/services', response_model=list[ServiceModel]) +async def get_service_list(): + """Get the list of existent firegex services""" + return db.query(""" + SELECT + s.service_id service_id, + s.status status, + s.port port, + s.name name, + s.proto proto, + s.ip_int ip_int, + COUNT(f.filter_id) n_filters, + COALESCE(SUM(f.blocked_packets),0) blocked_packets, + COALESCE(SUM(f.edited_packets),0) edited_packets + FROM services s LEFT JOIN pyfilter f ON s.service_id = f.service_id + GROUP BY s.service_id; + """) + +@app.get('/services/{service_id}', response_model=ServiceModel) +async def 
get_service_by_id(service_id: str): + """Get info about a specific service using his id""" + res = db.query(""" + SELECT + s.service_id service_id, + s.status status, + s.port port, + s.name name, + s.proto proto, + s.ip_int ip_int, + COUNT(f.filter_id) n_filters, + COALESCE(SUM(f.blocked_packets),0) blocked_packets, + COALESCE(SUM(f.edited_packets),0) edited_packets + FROM services s LEFT JOIN pyfilter f ON s.service_id = f.service_id + WHERE s.service_id = ? GROUP BY s.service_id; + """, service_id) + if len(res) == 0: + raise HTTPException(status_code=400, detail="This service does not exists!") + return res[0] + +@app.post('/services/{service_id}/stop', response_model=StatusMessageModel) +async def service_stop(service_id: str): + """Request the stop of a specific service""" + await firewall.get(service_id).next(STATUS.STOP) + await refresh_frontend() + return {'status': 'ok'} + +@app.post('/services/{service_id}/start', response_model=StatusMessageModel) +async def service_start(service_id: str): + """Request the start of a specific service""" + await firewall.get(service_id).next(STATUS.ACTIVE) + await refresh_frontend() + return {'status': 'ok'} + +@app.delete('/services/{service_id}', response_model=StatusMessageModel) +async def service_delete(service_id: str): + """Request the deletion of a specific service""" + db.query('DELETE FROM services WHERE service_id = ?;', service_id) + db.query('DELETE FROM pyfilter WHERE service_id = ?;', service_id) + await firewall.remove(service_id) + await refresh_frontend() + return {'status': 'ok'} + +@app.put('/services/{service_id}/rename', response_model=StatusMessageModel) +async def service_rename(service_id: str, form: RenameForm): + """Request to change the name of a specific service""" + form.name = refactor_name(form.name) + if not form.name: + raise HTTPException(status_code=400, detail="The name cannot be empty!") + try: + db.query('UPDATE services SET name=? 
WHERE service_id = ?;', form.name, service_id) + except sqlite3.IntegrityError: + raise HTTPException(status_code=400, detail="This name is already used") + await refresh_frontend() + return {'status': 'ok'} + +@app.get('/services/{service_id}/pyfilters', response_model=list[PyFilterModel]) +async def get_service_pyfilter_list(service_id: str): + """Get the list of the pyfilters of a service""" + if not db.query("SELECT 1 FROM services s WHERE s.service_id = ?;", service_id): + raise HTTPException(status_code=400, detail="This service does not exists!") + return db.query(""" + SELECT + filter_id, name, blocked_packets, edited_packets, active + FROM pyfilter WHERE service_id = ?; + """, service_id) + +@app.get('/pyfilters/{filter_id}', response_model=PyFilterModel) +async def get_pyfilter_by_id(filter_id: int): + """Get pyfilter info using his id""" + res = db.query(""" + SELECT + filter_id, name, blocked_packets, edited_packets, active + FROM pyfilter WHERE filter_id = ?; + """, filter_id) + if len(res) == 0: + raise HTTPException(status_code=400, detail="This filter does not exists!") + return res[0] + +@app.delete('/pyfilters/{filter_id}', response_model=StatusMessageModel) +async def pyfilter_delete(filter_id: int): + """Delete a pyfilter using his id""" + res = db.query('SELECT * FROM pyfilter WHERE filter_id = ?;', filter_id) + if len(res) != 0: + db.query('DELETE FROM pyfilter WHERE filter_id = ?;', filter_id) + await firewall.get(res[0]["service_id"]).update_filters() + await refresh_frontend() + + return {'status': 'ok'} + +@app.post('/pyfilters/{filter_id}/enable', response_model=StatusMessageModel) +async def pyfilter_enable(filter_id: int): + """Request the enabling of a pyfilter""" + res = db.query('SELECT * FROM pyfilter WHERE filter_id = ?;', filter_id) + if len(res) != 0: + db.query('UPDATE pyfilter SET active=1 WHERE filter_id = ?;', filter_id) + await firewall.get(res[0]["service_id"]).update_filters() + await refresh_frontend() + return {'status': 
'ok'} + +@app.post('/pyfilters/{filter_id}/disable', response_model=StatusMessageModel) +async def pyfilter_disable(filter_id: int): + """Request the deactivation of a pyfilter""" + res = db.query('SELECT * FROM pyfilter WHERE filter_id = ?;', filter_id) + if len(res) != 0: + db.query('UPDATE pyfilter SET active=0 WHERE filter_id = ?;', filter_id) + await firewall.get(res[0]["service_id"]).update_filters() + await refresh_frontend() + return {'status': 'ok'} + +@app.post('/services', response_model=ServiceAddResponse) +async def add_new_service(form: ServiceAddForm): + """Add a new service""" + try: + form.ip_int = ip_parse(form.ip_int) + except ValueError: + raise HTTPException(status_code=400, detail="Invalid address") + if form.proto not in ["tcp", "http"]: + raise HTTPException(status_code=400, detail="Invalid protocol") + srv_id = None + try: + srv_id = gen_service_id() + db.query("INSERT INTO services (service_id ,name, port, status, proto, ip_int) VALUES (?, ?, ?, ?, ?, ?)", + srv_id, refactor_name(form.name), form.port, STATUS.STOP, form.proto, form.ip_int) + except sqlite3.IntegrityError: + raise HTTPException(status_code=400, detail="This type of service already exists") + await firewall.reload() + await refresh_frontend() + return {'status': 'ok', 'service_id': srv_id} + +#TODO check all the APIs and add +# 1. API to change the python filter file +# 2. 
a socketio mechanism to lock the previous feature \ No newline at end of file diff --git a/backend/routers/nfregex.py b/backend/routers/nfregex.py index f5de063..1a41d41 100644 --- a/backend/routers/nfregex.py +++ b/backend/routers/nfregex.py @@ -134,7 +134,7 @@ async def get_service_list(): GROUP BY s.service_id; """) -@app.get('/service/{service_id}', response_model=ServiceModel) +@app.get('/services/{service_id}', response_model=ServiceModel) async def get_service_by_id(service_id: str): """Get info about a specific service using his id""" res = db.query(""" @@ -154,21 +154,21 @@ async def get_service_by_id(service_id: str): raise HTTPException(status_code=400, detail="This service does not exists!") return res[0] -@app.get('/service/{service_id}/stop', response_model=StatusMessageModel) +@app.post('/services/{service_id}/stop', response_model=StatusMessageModel) async def service_stop(service_id: str): """Request the stop of a specific service""" await firewall.get(service_id).next(STATUS.STOP) await refresh_frontend() return {'status': 'ok'} -@app.get('/service/{service_id}/start', response_model=StatusMessageModel) +@app.post('/services/{service_id}/start', response_model=StatusMessageModel) async def service_start(service_id: str): """Request the start of a specific service""" await firewall.get(service_id).next(STATUS.ACTIVE) await refresh_frontend() return {'status': 'ok'} -@app.get('/service/{service_id}/delete', response_model=StatusMessageModel) +@app.delete('/services/{service_id}', response_model=StatusMessageModel) async def service_delete(service_id: str): """Request the deletion of a specific service""" db.query('DELETE FROM services WHERE service_id = ?;', service_id) @@ -177,7 +177,7 @@ async def service_delete(service_id: str): await refresh_frontend() return {'status': 'ok'} -@app.post('/service/{service_id}/rename', response_model=StatusMessageModel) +@app.put('/services/{service_id}/rename', response_model=StatusMessageModel) async def 
service_rename(service_id: str, form: RenameForm): """Request to change the name of a specific service""" form.name = refactor_name(form.name) @@ -190,7 +190,7 @@ async def service_rename(service_id: str, form: RenameForm): await refresh_frontend() return {'status': 'ok'} -@app.get('/service/{service_id}/regexes', response_model=list[RegexModel]) +@app.get('/services/{service_id}/regexes', response_model=list[RegexModel]) async def get_service_regexe_list(service_id: str): """Get the list of the regexes of a service""" if not db.query("SELECT 1 FROM services s WHERE s.service_id = ?;", service_id): @@ -202,7 +202,7 @@ async def get_service_regexe_list(service_id: str): FROM regexes WHERE service_id = ?; """, service_id) -@app.get('/regex/{regex_id}', response_model=RegexModel) +@app.get('/regexes/{regex_id}', response_model=RegexModel) async def get_regex_by_id(regex_id: int): """Get regex info using his id""" res = db.query(""" @@ -215,7 +215,7 @@ async def get_regex_by_id(regex_id: int): raise HTTPException(status_code=400, detail="This regex does not exists!") return res[0] -@app.get('/regex/{regex_id}/delete', response_model=StatusMessageModel) +@app.delete('/regexes/{regex_id}', response_model=StatusMessageModel) async def regex_delete(regex_id: int): """Delete a regex using his id""" res = db.query('SELECT * FROM regexes WHERE regex_id = ?;', regex_id) @@ -226,7 +226,7 @@ async def regex_delete(regex_id: int): return {'status': 'ok'} -@app.get('/regex/{regex_id}/enable', response_model=StatusMessageModel) +@app.post('/regexes/{regex_id}/enable', response_model=StatusMessageModel) async def regex_enable(regex_id: int): """Request the enabling of a regex""" res = db.query('SELECT * FROM regexes WHERE regex_id = ?;', regex_id) @@ -236,7 +236,7 @@ async def regex_enable(regex_id: int): await refresh_frontend() return {'status': 'ok'} -@app.get('/regex/{regex_id}/disable', response_model=StatusMessageModel) +@app.post('/regexes/{regex_id}/disable', 
response_model=StatusMessageModel) async def regex_disable(regex_id: int): """Request the deactivation of a regex""" res = db.query('SELECT * FROM regexes WHERE regex_id = ?;', regex_id) @@ -246,7 +246,7 @@ async def regex_disable(regex_id: int): await refresh_frontend() return {'status': 'ok'} -@app.post('/regexes/add', response_model=StatusMessageModel) +@app.post('/regexes', response_model=StatusMessageModel) async def add_new_regex(form: RegexAddForm): """Add a new regex""" try: @@ -263,7 +263,7 @@ async def add_new_regex(form: RegexAddForm): await refresh_frontend() return {'status': 'ok'} -@app.post('/services/add', response_model=ServiceAddResponse) +@app.post('/services', response_model=ServiceAddResponse) async def add_new_service(form: ServiceAddForm): """Add a new service""" try: @@ -299,7 +299,8 @@ async def metrics(): FROM regexes r LEFT JOIN services s ON s.service_id = r.service_id; """) metrics = [] - sanitize = lambda s : s.replace('\\', '\\\\').replace('"', '\\"').replace('\n', '\\n') + def sanitize(s): + return s.replace('\\', '\\\\').replace('"', '\\"').replace('\n', '\\n') for stat in stats: props = f'service_name="{sanitize(stat["name"])}",regex="{sanitize(b64decode(stat["regex"]).decode())}",mode="{stat["mode"]}",is_case_sensitive="{stat["is_case_sensitive"]}"' metrics.append(f'firegex_blocked_packets{{{props}}} {stat["blocked_packets"]}') diff --git a/backend/routers/porthijack.py b/backend/routers/porthijack.py index 8fd3c54..7899ef4 100644 --- a/backend/routers/porthijack.py +++ b/backend/routers/porthijack.py @@ -92,7 +92,7 @@ async def get_service_list(): """Get the list of existent firegex services""" return db.query("SELECT service_id, active, public_port, proxy_port, name, proto, ip_src, ip_dst FROM services;") -@app.get('/service/{service_id}', response_model=ServiceModel) +@app.get('/services/{service_id}', response_model=ServiceModel) async def get_service_by_id(service_id: str): """Get info about a specific service using his id""" 
res = db.query("SELECT service_id, active, public_port, proxy_port, name, proto, ip_src, ip_dst FROM services WHERE service_id = ?;", service_id) @@ -100,21 +100,21 @@ async def get_service_by_id(service_id: str): raise HTTPException(status_code=400, detail="This service does not exists!") return res[0] -@app.get('/service/{service_id}/stop', response_model=StatusMessageModel) +@app.post('/services/{service_id}/stop', response_model=StatusMessageModel) async def service_stop(service_id: str): """Request the stop of a specific service""" await firewall.get(service_id).disable() await refresh_frontend() return {'status': 'ok'} -@app.get('/service/{service_id}/start', response_model=StatusMessageModel) +@app.post('/services/{service_id}/start', response_model=StatusMessageModel) async def service_start(service_id: str): """Request the start of a specific service""" await firewall.get(service_id).enable() await refresh_frontend() return {'status': 'ok'} -@app.get('/service/{service_id}/delete', response_model=StatusMessageModel) +@app.delete('/services/{service_id}', response_model=StatusMessageModel) async def service_delete(service_id: str): """Request the deletion of a specific service""" db.query('DELETE FROM services WHERE service_id = ?;', service_id) @@ -122,7 +122,7 @@ async def service_delete(service_id: str): await refresh_frontend() return {'status': 'ok'} -@app.post('/service/{service_id}/rename', response_model=StatusMessageModel) +@app.put('/services/{service_id}/rename', response_model=StatusMessageModel) async def service_rename(service_id: str, form: RenameForm): """Request to change the name of a specific service""" form.name = refactor_name(form.name) @@ -139,7 +139,7 @@ class ChangeDestination(BaseModel): ip_dst: str proxy_port: PortType -@app.post('/service/{service_id}/change-destination', response_model=StatusMessageModel) +@app.put('/services/{service_id}/change-destination', response_model=StatusMessageModel) async def 
service_change_destination(service_id: str, form: ChangeDestination): """Request to change the proxy destination of the service""" @@ -162,7 +162,7 @@ async def service_change_destination(service_id: str, form: ChangeDestination): await refresh_frontend() return {'status': 'ok'} -@app.post('/services/add', response_model=ServiceAddResponse) +@app.post('/services', response_model=ServiceAddResponse) async def add_new_service(form: ServiceAddForm): """Add a new service""" try: diff --git a/frontend/src/components/Firewall/utils.ts b/frontend/src/components/Firewall/utils.ts index fa2419b..c051df8 100644 --- a/frontend/src/components/Firewall/utils.ts +++ b/frontend/src/components/Firewall/utils.ts @@ -1,6 +1,6 @@ import { useQuery } from "@tanstack/react-query" import { ServerResponse } from "../../js/models" -import { getapi, postapi } from "../../js/utils" +import { getapi, postapi, putapi } from "../../js/utils" export enum Protocol { TCP = "tcp", @@ -79,15 +79,15 @@ export const firewall = { return await getapi("firewall/settings") as FirewallSettings; }, setsettings: async(data:FirewallSettings) => { - return await postapi("firewall/settings/set", data) as ServerResponse; + return await putapi("firewall/settings", data) as ServerResponse; }, enable: async() => { - return await getapi("firewall/enable") as ServerResponse; + return await postapi("firewall/enable") as ServerResponse; }, disable: async() => { - return await getapi("firewall/disable") as ServerResponse; + return await postapi("firewall/disable") as ServerResponse; }, ruleset: async (data:RuleAddForm) => { - return await postapi("firewall/rules/set", data) as ServerResponseListed; + return await postapi("firewall/rules", data) as ServerResponseListed; } } \ No newline at end of file diff --git a/frontend/src/components/NFRegex/utils.ts b/frontend/src/components/NFRegex/utils.ts index cdf35f8..1c34bc3 100644 --- a/frontend/src/components/NFRegex/utils.ts +++ b/frontend/src/components/NFRegex/utils.ts 
@@ -1,5 +1,5 @@ import { RegexFilter, ServerResponse } from "../../js/models" -import { getapi, postapi } from "../../js/utils" +import { deleteapi, getapi, postapi, putapi } from "../../js/utils" import { RegexAddForm } from "../../js/models" import { useQuery, useQueryClient } from "@tanstack/react-query" @@ -40,44 +40,44 @@ export const nfregex = { return await getapi("nfregex/services") as Service[]; }, serviceinfo: async (service_id:string) => { - return await getapi(`nfregex/service/${service_id}`) as Service; + return await getapi(`nfregex/services/${service_id}`) as Service; }, regexdelete: async (regex_id:number) => { - const { status } = await getapi(`nfregex/regex/${regex_id}/delete`) as ServerResponse; + const { status } = await deleteapi(`nfregex/regexes/${regex_id}`) as ServerResponse; return status === "ok"?undefined:status }, regexenable: async (regex_id:number) => { - const { status } = await getapi(`nfregex/regex/${regex_id}/enable`) as ServerResponse; + const { status } = await postapi(`nfregex/regexes/${regex_id}/enable`) as ServerResponse; return status === "ok"?undefined:status }, regexdisable: async (regex_id:number) => { - const { status } = await getapi(`nfregex/regex/${regex_id}/disable`) as ServerResponse; + const { status } = await postapi(`nfregex/regexes/${regex_id}/disable`) as ServerResponse; return status === "ok"?undefined:status }, servicestart: async (service_id:string) => { - const { status } = await getapi(`nfregex/service/${service_id}/start`) as ServerResponse; + const { status } = await postapi(`nfregex/services/${service_id}/start`) as ServerResponse; return status === "ok"?undefined:status }, servicerename: async (service_id:string, name: string) => { - const { status } = await postapi(`nfregex/service/${service_id}/rename`,{ name }) as ServerResponse; + const { status } = await putapi(`nfregex/services/${service_id}/rename`,{ name }) as ServerResponse; return status === "ok"?undefined:status }, servicestop: async 
(service_id:string) => { - const { status } = await getapi(`nfregex/service/${service_id}/stop`) as ServerResponse; + const { status } = await postapi(`nfregex/services/${service_id}/stop`) as ServerResponse; return status === "ok"?undefined:status }, servicesadd: async (data:ServiceAddForm) => { - return await postapi("nfregex/services/add",data) as ServiceAddResponse; + return await postapi("nfregex/services",data) as ServiceAddResponse; }, servicedelete: async (service_id:string) => { - const { status } = await getapi(`nfregex/service/${service_id}/delete`) as ServerResponse; + const { status } = await deleteapi(`nfregex/services/${service_id}`) as ServerResponse; return status === "ok"?undefined:status }, regexesadd: async (data:RegexAddForm) => { - const { status } = await postapi("nfregex/regexes/add",data) as ServerResponse; + const { status } = await postapi("nfregex/regexes",data) as ServerResponse; return status === "ok"?undefined:status }, serviceregexes: async (service_id:string) => { - return await getapi(`nfregex/service/${service_id}/regexes`) as RegexFilter[]; + return await getapi(`nfregex/services/${service_id}/regexes`) as RegexFilter[]; } } \ No newline at end of file diff --git a/frontend/src/components/PortHijack/ServiceRow/index.tsx b/frontend/src/components/PortHijack/ServiceRow/index.tsx index 9428170..d481f75 100644 --- a/frontend/src/components/PortHijack/ServiceRow/index.tsx +++ b/frontend/src/components/PortHijack/ServiceRow/index.tsx @@ -29,24 +29,6 @@ function ServiceRow({ service }:{ service:Service }) { validate:{ proxy_port: (value) => (value > 0 && value < 65536)? 
null : "Invalid proxy port" } }) - const onChangeProxyPort = ({proxy_port}:{proxy_port:number}) => { - if (proxy_port === service.proxy_port) return - if (proxy_port > 0 && proxy_port < 65536 && proxy_port !== service.public_port){ - porthijack.changedestination(service.service_id, service.ip_dst, proxy_port).then( res => { - if (res.status === "ok"){ - okNotify(`Service ${service.name} destination port has changed in ${ proxy_port }`, `Successfully changed destination port`) - }else{ - errorNotify(`Error while changing the destination port of ${service.name}`,`Error: ${res.status}`) - } - }).catch( err => { - errorNotify("Request for changing port failed!",`Error: [ ${err} ]`) - }) - }else{ - form.setFieldValue("proxy_port", service.proxy_port) - errorNotify(`Error while changing the destination port of ${service.name}`,`Insert a valid port number`) - } - } - const stopService = async () => { setButtonLoading(true) @@ -119,21 +101,7 @@ function ServiceRow({ service }:{ service:Service }) { - TO {service.ip_dst} : -
portInputRef.current?.blur())}> - {onChangeProxyPort({proxy_port:parseInt(e.target.value)})}} - ref={portInputRef} - {...form.getInputProps("proxy_port")} - /> - + TO {service.ip_dst} : service.proxy_port
diff --git a/frontend/src/components/PortHijack/utils.ts b/frontend/src/components/PortHijack/utils.ts index 80875cc..2c00417 100644 --- a/frontend/src/components/PortHijack/utils.ts +++ b/frontend/src/components/PortHijack/utils.ts @@ -1,5 +1,5 @@ import { ServerResponse } from "../../js/models" -import { getapi, postapi } from "../../js/utils" +import { deleteapi, getapi, postapi, putapi } from "../../js/utils" import { useQuery } from "@tanstack/react-query" export type GeneralStats = { @@ -37,28 +37,28 @@ export const porthijack = { return await getapi("porthijack/services") as Service[]; }, serviceinfo: async (service_id:string) => { - return await getapi(`porthijack/service/${service_id}`) as Service; + return await getapi(`porthijack/services/${service_id}`) as Service; }, servicestart: async (service_id:string) => { - const { status } = await getapi(`porthijack/service/${service_id}/start`) as ServerResponse; + const { status } = await postapi(`porthijack/services/${service_id}/start`) as ServerResponse; return status === "ok"?undefined:status }, servicerename: async (service_id:string, name: string) => { - const { status } = await postapi(`porthijack/service/${service_id}/rename`,{ name }) as ServerResponse; + const { status } = await putapi(`porthijack/services/${service_id}/rename`,{ name }) as ServerResponse; return status === "ok"?undefined:status }, servicestop: async (service_id:string) => { - const { status } = await getapi(`porthijack/service/${service_id}/stop`) as ServerResponse; + const { status } = await postapi(`porthijack/services/${service_id}/stop`) as ServerResponse; return status === "ok"?undefined:status }, servicesadd: async (data:ServiceAddForm) => { - return await postapi("porthijack/services/add",data) as ServiceAddResponse; + return await postapi("porthijack/services",data) as ServiceAddResponse; }, servicedelete: async (service_id:string) => { - const { status } = await getapi(`porthijack/service/${service_id}/delete`) as 
ServerResponse; + const { status } = await deleteapi(`porthijack/services/${service_id}`) as ServerResponse; return status === "ok"?undefined:status }, changedestination: async (service_id:string, ip_dst:string, proxy_port:number) => { - return await postapi(`porthijack/service/${service_id}/change-destination`, {proxy_port, ip_dst}) as ServerResponse; + return await putapi(`porthijack/services/${service_id}/change-destination`, {proxy_port, ip_dst}) as ServerResponse; } } \ No newline at end of file diff --git a/frontend/src/js/utils.tsx b/frontend/src/js/utils.tsx index ba84bcf..0197205 100644 --- a/frontend/src/js/utils.tsx +++ b/frontend/src/js/utils.tsx @@ -22,26 +22,6 @@ export const queryClient = new QueryClient({ defaultOptions: { queries: { staleTime: Infinity } }}) -export async function getapi(path:string):Promise{ - - return await new Promise((resolve, reject) => { - fetch(`${IS_DEV?`http://${DEV_IP_BACKEND}`:""}/api/${path}`,{ - credentials: "same-origin", - headers: { "Authorization" : "Bearer " + window.localStorage.getItem("access_token")} - }).then(res => { - if(res.status === 401) window.location.reload() - if(!res.ok){ - const errorDefault = res.statusText - return res.json().then( res => reject(getErrorMessageFromServerResponse(res, errorDefault)) ).catch( _err => reject(errorDefault)) - } - res.json().then( res => resolve(res) ).catch( err => reject(err)) - }) - .catch(err => { - reject(err) - }) - }); -} - export function getErrorMessage(e: any) { let error = "Unknown error"; if(typeof e == "string") return e @@ -56,7 +36,6 @@ export function getErrorMessage(e: any) { return error; } - export function getErrorMessageFromServerResponse(e: any, def:string = "Unknown error") { if (e.status){ return e.status @@ -74,17 +53,17 @@ export function getErrorMessageFromServerResponse(e: any, def:string = "Unknown } -export async function postapi(path:string,data:any,is_form:boolean=false):Promise{ +export async function 
genericapi(method:string,path:string,data:any = undefined, is_form:boolean=false):Promise{ return await new Promise((resolve, reject) => { fetch(`${IS_DEV?`http://${DEV_IP_BACKEND}`:""}/api/${path}`, { - method: 'POST', + method: method, credentials: "same-origin", cache: 'no-cache', headers: { - 'Content-Type': is_form ? 'application/x-www-form-urlencoded' : 'application/json', + ...(data?{'Content-Type': is_form ? 'application/x-www-form-urlencoded' : 'application/json'}:{}), "Authorization" : "Bearer " + window.localStorage.getItem("access_token") }, - body: is_form ? (new URLSearchParams(data)).toString() : JSON.stringify(data) + body: data? (is_form ? (new URLSearchParams(data)).toString() : JSON.stringify(data)) : undefined }).then(res => { if(res.status === 401) window.location.reload() if(res.status === 406) resolve({status:"Wrong Password"}) @@ -100,6 +79,22 @@ export async function postapi(path:string,data:any,is_form:boolean=false):Promis }); } +export async function getapi(path:string):Promise{ + return await genericapi("GET",path) +} + +export async function postapi(path:string,data:any=undefined,is_form:boolean=false):Promise{ + return await genericapi("POST",path,data,is_form) +} + +export async function deleteapi(path:string):Promise{ + return await genericapi("DELETE",path) +} + +export async function putapi(path:string,data:any):Promise{ + return await genericapi("PUT",path,data) +} + export function getMainPath(){ const paths = window.location.pathname.split("/") if (paths.length > 1) return paths[1] diff --git a/start.py b/start.py index 7ceb55a..ca7d1ff 100755 --- a/start.py +++ b/start.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 + from __future__ import annotations import argparse import sys diff --git a/tests/utils/firegexapi.py b/tests/utils/firegexapi.py index ce4a92c..13f7a0c 100644 --- a/tests/utils/firegexapi.py +++ b/tests/utils/firegexapi.py @@ -19,11 +19,17 @@ class BearerSession(): headers["Content-Type"] = 
"application/x-www-form-urlencoded" return self.s.post(endpoint, json=json, data=data, headers=headers) + def delete(self, endpoint, json={}): + return self.s.delete(endpoint, json=json, headers=self.headers) + + def put(self, endpoint, json={}): + return self.s.put(endpoint, json=json, headers=self.headers) + def get(self, endpoint, json={}): return self.s.get(endpoint, json=json, headers=self.headers) def set_token(self,token): - self.headers = {"Authorization": f"Bearer {token}"} + self.headers = {"Authorization": f"Bearer {token}"} def unset_token(self): self.headers = {} @@ -72,62 +78,57 @@ class FiregexAPI: def reset(self, delete: bool): self.s.post(f"{self.address}api/reset", json={"delete":delete}) - #Netfilter regex - def nf_get_stats(self): - req = self.s.get(f"{self.address}api/nfregex/stats") - return req.json() - def nf_get_services(self): req = self.s.get(f"{self.address}api/nfregex/services") return req.json() def nf_get_service(self,service_id: str): - req = self.s.get(f"{self.address}api/nfregex/service/{service_id}") + req = self.s.get(f"{self.address}api/nfregex/services/{service_id}") return req.json() def nf_stop_service(self,service_id: str): - req = self.s.get(f"{self.address}api/nfregex/service/{service_id}/stop") + req = self.s.post(f"{self.address}api/nfregex/services/{service_id}/stop") return verify(req) def nf_start_service(self,service_id: str): - req = self.s.get(f"{self.address}api/nfregex/service/{service_id}/start") + req = self.s.post(f"{self.address}api/nfregex/services/{service_id}/start") return verify(req) def nf_delete_service(self,service_id: str): - req = self.s.get(f"{self.address}api/nfregex/service/{service_id}/delete") + req = self.s.delete(f"{self.address}api/nfregex/services/{service_id}") return verify(req) def nf_rename_service(self,service_id: str, newname: str): - req = self.s.post(f"{self.address}api/nfregex/service/{service_id}/rename" , json={"name":newname}) + req = 
self.s.put(f"{self.address}api/nfregex/services/{service_id}/rename" , json={"name":newname}) return verify(req) def nf_get_service_regexes(self,service_id: str): - req = self.s.get(f"{self.address}api/nfregex/service/{service_id}/regexes") + req = self.s.get(f"{self.address}api/nfregex/services/{service_id}/regexes") return req.json() def nf_get_regex(self,regex_id: str): - req = self.s.get(f"{self.address}api/nfregex/regex/{regex_id}") + req = self.s.get(f"{self.address}api/nfregex/regexes/{regex_id}") return req.json() def nf_delete_regex(self,regex_id: str): - req = self.s.get(f"{self.address}api/nfregex/regex/{regex_id}/delete") + req = self.s.delete(f"{self.address}api/nfregex/regexes/{regex_id}") return verify(req) def nf_enable_regex(self,regex_id: str): - req = self.s.get(f"{self.address}api/nfregex/regex/{regex_id}/enable") + req = self.s.post(f"{self.address}api/nfregex/regexes/{regex_id}/enable") return verify(req) def nf_disable_regex(self,regex_id: str): - req = self.s.get(f"{self.address}api/nfregex/regex/{regex_id}/disable") + req = self.s.post(f"{self.address}api/nfregex/regexes/{regex_id}/disable") return verify(req) def nf_add_regex(self, service_id: str, regex: str, mode: str, active: bool, is_case_sensitive: bool): - req = self.s.post(f"{self.address}api/nfregex/regexes/add", + req = self.s.post(f"{self.address}api/nfregex/regexes", json={"service_id": service_id, "regex": regex, "mode": mode, "active": active, "is_case_sensitive": is_case_sensitive}) return verify(req) def nf_add_service(self, name: str, port: int, proto: str, ip_int: str): - req = self.s.post(f"{self.address}api/nfregex/services/add" , + req = self.s.post(f"{self.address}api/nfregex/services" , json={"name":name,"port":port, "proto": proto, "ip_int": ip_int}) return req.json()["service_id"] if verify(req) else False @@ -137,30 +138,30 @@ class FiregexAPI: return req.json() def ph_get_service(self,service_id: str): - req = 
self.s.get(f"{self.address}api/porthijack/service/{service_id}") + req = self.s.get(f"{self.address}api/porthijack/services/{service_id}") return req.json() def ph_stop_service(self,service_id: str): - req = self.s.get(f"{self.address}api/porthijack/service/{service_id}/stop") + req = self.s.post(f"{self.address}api/porthijack/services/{service_id}/stop") return verify(req) def ph_start_service(self,service_id: str): - req = self.s.get(f"{self.address}api/porthijack/service/{service_id}/start") + req = self.s.post(f"{self.address}api/porthijack/services/{service_id}/start") return verify(req) def ph_delete_service(self,service_id: str): - req = self.s.get(f"{self.address}api/porthijack/service/{service_id}/delete") + req = self.s.delete(f"{self.address}api/porthijack/services/{service_id}") return verify(req) def ph_rename_service(self,service_id: str,newname: str): - req = self.s.post(f"{self.address}api/porthijack/service/{service_id}/rename" , json={"name":newname}) + req = self.s.put(f"{self.address}api/porthijack/services/{service_id}/rename" , json={"name":newname}) return verify(req) def ph_change_destination(self,service_id: str, ip_dst:string , proxy_port: int): - req = self.s.post(f"{self.address}api/porthijack/service/{service_id}/change-destination", json={"ip_dst": ip_dst, "proxy_port": proxy_port}) + req = self.s.put(f"{self.address}api/porthijack/services/{service_id}/change-destination", json={"ip_dst": ip_dst, "proxy_port": proxy_port}) return verify(req) def ph_add_service(self, name: str, public_port: int, proxy_port: int, proto: str, ip_src: str, ip_dst: str): - req = self.s.post(f"{self.address}api/porthijack/services/add" , + req = self.s.post(f"{self.address}api/porthijack/services" , json={"name":name, "public_port": public_port, "proxy_port":proxy_port, "proto": proto, "ip_src": ip_src, "ip_dst": ip_dst}) return req.json()["service_id"] if verify(req) else False From 2fb77a348f695e0404e9ea3ecf4f16895e5711d1 Mon Sep 17 00:00:00 2001 From: 
Domingo Dirutigliano Date: Wed, 12 Feb 2025 01:16:10 +0100 Subject: [PATCH 03/11] removed fastapi_socketio + general improves --- .gitignore | 1 + Dockerfile | 2 +- backend/app.py | 11 +- backend/modules/nfproxy/firegex.py | 172 +++++++--------------------- backend/modules/nfproxy/firewall.py | 28 +++-- backend/modules/nfproxy/models.py | 3 +- backend/modules/nfproxy/nftables.py | 8 +- backend/requirements.txt | 2 +- backend/utils/__init__.py | 4 +- frontend/src/App.tsx | 2 +- 10 files changed, 76 insertions(+), 157 deletions(-) diff --git a/.gitignore b/.gitignore index 03e9b7f..40095c0 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,7 @@ /firegex-compose-tmp-file.yml /firegex.py /tests/benchmark.csv +/backend/modules/nfproxy/socks/ # misc **/.DS_Store **/.env.local diff --git a/Dockerfile b/Dockerfile index 7eea0ef..1e55e4e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,7 +27,7 @@ RUN pip3 install --no-cache-dir --break-system-packages -r /execute/requirements COPY ./backend/binsrc /execute/binsrc RUN g++ binsrc/nfqueue.cpp -o modules/cppqueue -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libhs libmnl) -#RUN g++ binsrc/nfproxy-tun.cpp -o modules/cppnfproxy -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libmnl) +#RUN g++ binsrc/nfproxy-tun.cpp -o modules/cppproxy -std=c++23 -O3 -lnetfilter_queue -pthread -lnfnetlink $(pkg-config --cflags --libs libtins libmnl) COPY ./backend/ /execute/ COPY --from=frontend /app/dist/ ./frontend/ diff --git a/backend/app.py b/backend/app.py index 2f6f7e1..5192f6a 100644 --- a/backend/app.py +++ b/backend/app.py @@ -8,13 +8,13 @@ from fastapi import FastAPI, HTTPException, Depends, APIRouter from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm from jose import jwt from passlib.context import CryptContext -from fastapi_socketio import SocketManager from utils.sqlite import SQLite from utils import API_VERSION, 
FIREGEX_PORT, JWT_ALGORITHM, get_interfaces, socketio_emit, DEBUG, SysctlManager from utils.loader import frontend_deploy, load_routers from utils.models import ChangePasswordModel, IpInterface, PasswordChangeForm, PasswordForm, ResetRequest, StatusModel, StatusMessageModel from contextlib import asynccontextmanager from fastapi.middleware.cors import CORSMiddleware +import socketio # DB init db = SQLite('db/firegex.db') @@ -42,7 +42,14 @@ app = FastAPI( title="Firegex API", version=API_VERSION, ) -utils.socketio = SocketManager(app, "/sock", socketio_path="") +utils.socketio = socketio.AsyncServer( + async_mode="asgi", + cors_allowed_origins=[], + transports=["websocket"] +) + +sio_app = socketio.ASGIApp(utils.socketio, socketio_path="/sock/socket.io", other_asgi_app=app) +app.mount("/sock", sio_app) if DEBUG: app.add_middleware( diff --git a/backend/modules/nfproxy/firegex.py b/backend/modules/nfproxy/firegex.py index 6f0e8d2..20651a5 100644 --- a/backend/modules/nfproxy/firegex.py +++ b/backend/modules/nfproxy/firegex.py @@ -1,97 +1,61 @@ -from modules.nfregex.nftables import FiregexTables +from modules.nfproxy.nftables import FiregexTables from utils import run_func -from modules.nfregex.models import Service, Regex -import re +from modules.nfproxy.models import Service, PyFilter import os import asyncio -import traceback -from utils import DEBUG -from fastapi import HTTPException - -#TODO copied file, review +import socket +import shutil nft = FiregexTables() -class RegexFilter: - def __init__( - self, regex, - is_case_sensitive=True, - input_mode=False, - output_mode=False, - blocked_packets=0, - id=None, - update_func = None - ): - self.regex = regex - self.is_case_sensitive = is_case_sensitive - if input_mode == output_mode: - input_mode = output_mode = True # (False, False) == (True, True) - self.input_mode = input_mode - self.output_mode = output_mode - self.blocked = blocked_packets - self.id = id - self.update_func = update_func - self.compiled_regex = 
self.compile() - - @classmethod - def from_regex(cls, regex:Regex, update_func = None): - return cls( - id=regex.id, regex=regex.regex, is_case_sensitive=regex.is_case_sensitive, - blocked_packets=regex.blocked_packets, - input_mode = regex.mode in ["C","B"], output_mode=regex.mode in ["S","B"], - update_func = update_func - ) - def compile(self): - if isinstance(self.regex, str): - self.regex = self.regex.encode() - if not isinstance(self.regex, bytes): - raise Exception("Invalid Regex Paramether") - re.compile(self.regex) # raise re.error if it's invalid! - case_sensitive = "1" if self.is_case_sensitive else "0" - if self.input_mode: - yield case_sensitive + "C" + self.regex.hex() - if self.output_mode: - yield case_sensitive + "S" + self.regex.hex() - - async def update(self): - if self.update_func: - await run_func(self.update_func, self) - class FiregexInterceptor: def __init__(self): self.srv:Service - self.filter_map_lock:asyncio.Lock - self.filter_map: dict[str, RegexFilter] - self.regex_filters: set[RegexFilter] - self.update_config_lock:asyncio.Lock + self._stats_updater_cb:callable self.process:asyncio.subprocess.Process - self.update_task: asyncio.Task - self.ack_arrived = False - self.ack_status = None - self.ack_fail_what = "" - self.ack_lock = asyncio.Lock() + self.base_dir = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "socks", self.srv.id + ) + self.n_threads = int(os.getenv("NTHREADS","1")) + + self.connection_socket = os.path.join(self.base_dir, "connection.sock") + self.vedict_sockets = [os.path.join(self.base_dir, f"vedict{i}.sock") for i in range(self.n_threads)] + self.socks = [] + + def add_sock(self, path): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + sock.bind(path) + self.socks.append(sock) + return sock + + async def _call_stats_updater_callback(self, filter: PyFilter): + if self._stats_updater_cb: + await run_func(self._stats_updater_cb(filter)) @classmethod - async def start(cls, srv: Service): + async 
def start(cls, srv: Service, stats_updater_cb:callable): self = cls() self.srv = srv - self.filter_map_lock = asyncio.Lock() - self.update_config_lock = asyncio.Lock() + self._stats_updater_cb = stats_updater_cb + os.makedirs(self.base_dir, exist_ok=True) + self.add_sock(self.connection_socket) + for path in self.vedict_sockets: + self.add_sock(path) queue_range = await self._start_binary() - self.update_task = asyncio.create_task(self.update_blocked()) + # TODO starts python workers nft.add(self.srv, queue_range) - if not self.ack_lock.locked(): - await self.ack_lock.acquire() return self async def _start_binary(self): - proxy_binary_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),"../cppqueue") + proxy_binary_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),"../cppproxy") self.process = await asyncio.create_subprocess_exec( proxy_binary_path, stdout=asyncio.subprocess.PIPE, stdin=asyncio.subprocess.PIPE, - env={"MATCH_MODE": "stream" if self.srv.proto == "tcp" else "block", "NTHREADS": os.getenv("NTHREADS","1")}, ) + self.process.stdin.write(self.base_dir.encode().hex().encode()+b" 3\n") + await self.process.stdin.drain() line_fut = self.process.stdout.readuntil() try: line_fut = await asyncio.wait_for(line_fut, timeout=3) @@ -106,68 +70,14 @@ class FiregexInterceptor: self.process.kill() raise Exception("Invalid binary output") - async def update_blocked(self): - try: - while True: - line = (await self.process.stdout.readuntil()).decode() - if DEBUG: - print(line) - if line.startswith("BLOCKED "): - regex_id = line.split()[1] - async with self.filter_map_lock: - if regex_id in self.filter_map: - self.filter_map[regex_id].blocked+=1 - await self.filter_map[regex_id].update() - if line.startswith("ACK "): - self.ack_arrived = True - self.ack_status = line.split()[1].upper() == "OK" - if not self.ack_status: - self.ack_fail_what = " ".join(line.split()[2:]) - self.ack_lock.release() - except asyncio.CancelledError: - pass - except 
asyncio.IncompleteReadError: - pass - except Exception: - traceback.print_exc() - async def stop(self): - self.update_task.cancel() if self.process and self.process.returncode is None: self.process.kill() - - async def _update_config(self, filters_codes): - async with self.update_config_lock: - self.process.stdin.write((" ".join(filters_codes)+"\n").encode()) - await self.process.stdin.drain() - try: - async with asyncio.timeout(3): - await self.ack_lock.acquire() - except TimeoutError: - pass - if not self.ack_arrived or not self.ack_status: - raise HTTPException(status_code=500, detail=f"NFQ error: {self.ack_fail_what}") - - - async def reload(self, filters:list[RegexFilter]): - async with self.filter_map_lock: - self.filter_map = self.compile_filters(filters) - filters_codes = self.get_filter_codes() - await self._update_config(filters_codes) - - def get_filter_codes(self): - filters_codes = list(self.filter_map.keys()) - filters_codes.sort(key=lambda a: self.filter_map[a].blocked, reverse=True) - return filters_codes - - def compile_filters(self, filters:list[RegexFilter]): - res = {} - for filter_obj in filters: - try: - raw_filters = filter_obj.compile() - for filter in raw_filters: - res[filter] = filter_obj - except Exception: - pass - return res + for sock in self.socks: + sock.close() + shutil.rmtree(self.base_dir) + async def reload(self, filters:list[PyFilter]): + # filters are the functions to use in the workers (other functions are disabled or not flagged as filters) + # TODO update filters in python workers (prob for new filters added) (reading from file????) 
+ pass \ No newline at end of file diff --git a/backend/modules/nfproxy/firewall.py b/backend/modules/nfproxy/firewall.py index 5ff1c39..59002d9 100644 --- a/backend/modules/nfproxy/firewall.py +++ b/backend/modules/nfproxy/firewall.py @@ -1,18 +1,15 @@ import asyncio -from modules.nfregex.firegex import FiregexInterceptor, RegexFilter -from modules.nfregex.nftables import FiregexTables, FiregexFilter -from modules.nfregex.models import Regex, Service +from modules.nfproxy.firegex import FiregexInterceptor +from modules.nfproxy.nftables import FiregexTables, FiregexFilter +from modules.nfproxy.models import Service, PyFilter from utils.sqlite import SQLite -#TODO copied file, review - class STATUS: STOP = "stop" ACTIVE = "active" nft = FiregexTables() - class ServiceManager: def __init__(self, srv: Service, db): self.srv = srv @@ -23,13 +20,13 @@ class ServiceManager: self.interceptor = None async def _update_filters_from_db(self): - regexes = [ - Regex.from_dict(ele) for ele in - self.db.query("SELECT * FROM regexes WHERE service_id = ? AND active=1;", self.srv.id) + pyfilters = [ + PyFilter.from_dict(ele) for ele in + self.db.query("SELECT * FROM pyfilter WHERE service_id = ? 
AND active=1;", self.srv.id) ] #Filter check old_filters = set(self.filters.keys()) - new_filters = set([f.id for f in regexes]) + new_filters = set([f.id for f in pyfilters]) #remove old filters for f in old_filters: if f not in new_filters: @@ -37,8 +34,7 @@ class ServiceManager: #add new filters for f in new_filters: if f not in old_filters: - filter = [ele for ele in regexes if ele.id == f][0] - self.filters[f] = RegexFilter.from_regex(filter, self._stats_updater) + self.filters[f] = [ele for ele in pyfilters if ele.id == f][0] if self.interceptor: await self.interceptor.reload(self.filters.values()) @@ -54,8 +50,8 @@ class ServiceManager: elif (self.status, to) == (STATUS.STOP, STATUS.ACTIVE): await self.restart() - def _stats_updater(self,filter:RegexFilter): - self.db.query("UPDATE regexes SET blocked_packets = ? WHERE regex_id = ?;", filter.blocked, filter.id) + def _stats_updater(self,filter:PyFilter): + self.db.query("UPDATE pyfilter SET blocked_packets = ?, edited_packets = ? 
WHERE filter_id = ?;", filter.blocked_packets, filter.edited_packets, filter.id) def _set_status(self,status): self.status = status @@ -64,7 +60,7 @@ class ServiceManager: async def start(self): if not self.interceptor: nft.delete(self.srv) - self.interceptor = await FiregexInterceptor.start(self.srv) + self.interceptor = await FiregexInterceptor.start(self.srv, self._stats_updater) await self._update_filters_from_db() self._set_status(STATUS.ACTIVE) @@ -119,3 +115,5 @@ class FirewallManager: class ServiceNotFoundException(Exception): pass + + diff --git a/backend/modules/nfproxy/models.py b/backend/modules/nfproxy/models.py index 24d1087..ba048c4 100644 --- a/backend/modules/nfproxy/models.py +++ b/backend/modules/nfproxy/models.py @@ -1,3 +1,4 @@ + class Service: def __init__(self, service_id: str, status: str, port: int, name: str, proto: str, ip_int: str, **other): self.id = service_id @@ -14,7 +15,7 @@ class Service: class PyFilter: def __init__(self, filter_id:int, name: str, blocked_packets: int, edited_packets: int, active: bool, **other): - self.filter_id = filter_id + self.id = filter_id self.name = name self.blocked_packets = blocked_packets self.edited_packets = edited_packets diff --git a/backend/modules/nfproxy/nftables.py b/backend/modules/nfproxy/nftables.py index 54c170a..588ac1e 100644 --- a/backend/modules/nfproxy/nftables.py +++ b/backend/modules/nfproxy/nftables.py @@ -1,4 +1,4 @@ -from modules.nfregex.models import Service +from modules.nfproxy.models import Service from utils import ip_parse, ip_family, NFTableManager, nftables_int_to_json class FiregexFilter: @@ -48,10 +48,12 @@ class FiregexTables(NFTableManager): def add(self, srv:Service, queue_range): for ele in self.get(): - if ele.__eq__(srv): return + if ele.__eq__(srv): + return init, end = queue_range - if init > end: init, end = end, init + if init > end: + init, end = end, init self.cmd( { "insert":{ "rule": { "family": "inet", diff --git a/backend/requirements.txt 
b/backend/requirements.txt index 9678711..024f520 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -4,5 +4,5 @@ uvicorn[standard] passlib[bcrypt] psutil python-jose[cryptography] -fastapi-socketio +python-socketio #git+https://salsa.debian.org/pkg-netfilter-team/pkg-nftables#egg=nftables&subdirectory=py diff --git a/backend/utils/__init__.py b/backend/utils/__init__.py index 44bf193..1d9c23a 100644 --- a/backend/utils/__init__.py +++ b/backend/utils/__init__.py @@ -5,13 +5,13 @@ import socket import psutil import sys import nftables -from fastapi_socketio import SocketManager +from socketio import AsyncServer from fastapi import Path from typing import Annotated LOCALHOST_IP = socket.gethostbyname(os.getenv("LOCALHOST_IP","127.0.0.1")) -socketio:SocketManager = None +socketio:AsyncServer = None ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) ROUTERS_DIR = os.path.join(ROOT_DIR,"routers") diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index fcfd39b..7fc33a5 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -14,7 +14,7 @@ import { Firewall } from './pages/Firewall'; import { useQueryClient } from '@tanstack/react-query'; -const socket = IS_DEV?io("ws://"+DEV_IP_BACKEND, {transports: ["websocket", "polling"], path:"/sock" }):io({transports: ["websocket", "polling"], path:"/sock"}); +const socket = IS_DEV?io("ws://"+DEV_IP_BACKEND, {transports: ["websocket"], path:"/sock/socket.io" }):io({transports: ["websocket"], path:"/sock/socket.io"}); function App() { From ec3bd84aaf532600c0091428188fc97162ad36a9 Mon Sep 17 00:00:00 2001 From: Domingo Dirutigliano Date: Wed, 12 Feb 2025 22:24:59 +0100 Subject: [PATCH 04/11] Frontend re-styling --- README.md | 2 +- frontend/bun.lock | 14 +- frontend/package.json | 12 +- .../src/components/NFRegex/AddNewService.tsx | 12 +- .../NFRegex/ServiceRow/RenameForm.tsx | 12 +- .../components/NFRegex/ServiceRow/index.tsx | 60 +++---- 
.../components/PortHijack/AddNewService.tsx | 12 +- .../ServiceRow/ChangeDestination.tsx | 11 +- .../PortHijack/ServiceRow/RenameForm.tsx | 12 +- .../PortHijack/ServiceRow/index.tsx | 56 +++--- frontend/src/components/RegexView/index.tsx | 62 +++---- frontend/src/components/YesNoModal.tsx | 2 +- frontend/src/index.css | 72 ++++++-- frontend/src/pages/NFRegex/ServiceDetails.tsx | 161 +++++++++++++++++- 14 files changed, 330 insertions(+), 170 deletions(-) diff --git a/README.md b/README.md index aa4bc3a..1e4e8b5 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ This means that firegex is projected to avoid any possibility to have the servic Initiially the project was based only on regex filters, and also now the main function uses regexes, but firegex have and will have also other filtering tools. # Credits -- Copyright (c) 2022 Pwnzer0tt1 +- Copyright (c) 2022-2025 Pwnzer0tt1 ## Star History diff --git a/frontend/bun.lock b/frontend/bun.lock index 01d62fd..526f399 100644 --- a/frontend/bun.lock +++ b/frontend/bun.lock @@ -141,17 +141,17 @@ "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], - "@mantine/core": ["@mantine/core@7.16.2", "", { "dependencies": { "@floating-ui/react": "^0.26.28", "clsx": "^2.1.1", "react-number-format": "^5.4.3", "react-remove-scroll": "^2.6.2", "react-textarea-autosize": "8.5.6", "type-fest": "^4.27.0" }, "peerDependencies": { "@mantine/hooks": "7.16.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "sha512-6dwFz+8HrOqFan7GezgpoWyZSCxedh10S8iILGVsc3GXiD4gzo+3VZndZKccktkYZ3GVC9E3cCS3SxbiyKSAVw=="], + "@mantine/core": ["@mantine/core@7.16.3", "", { "dependencies": { "@floating-ui/react": "^0.26.28", "clsx": "^2.1.1", "react-number-format": "^5.4.3", "react-remove-scroll": "^2.6.2", 
"react-textarea-autosize": "8.5.6", "type-fest": "^4.27.0" }, "peerDependencies": { "@mantine/hooks": "7.16.3", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "sha512-cxhIpfd2i0Zmk9TKdejYAoIvWouMGhzK3OOX+VRViZ5HEjnTQCGl2h3db56ThqB6NfVPCno6BPbt5lwekTtmuQ=="], - "@mantine/form": ["@mantine/form@7.16.2", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "klona": "^2.0.6" }, "peerDependencies": { "react": "^18.x || ^19.x" } }, "sha512-JZkLbZ7xWAZndPrxObkf10gjHj57x8yvI/vobjDhfWN3zFPTSWmSSF6yBE1FpITseOs3oR03hlkqG6EclK6g+g=="], + "@mantine/form": ["@mantine/form@7.16.3", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "klona": "^2.0.6" }, "peerDependencies": { "react": "^18.x || ^19.x" } }, "sha512-GqomUG2Ri5adxYsTU1S5IhKRPcqTG5JkPvMERns8PQAcUz/lvzsnk3wY1v4K5CEbCAdpimle4bSsZTM9g697vg=="], - "@mantine/hooks": ["@mantine/hooks@7.16.2", "", { "peerDependencies": { "react": "^18.x || ^19.x" } }, "sha512-ZFHQhDi9T+r6VR5NEeE47gigPPIAHVIKDOCWsCsbCqHc3yz5l8kiO2RdfUmsTKV2KD/AiXnAw4b6pjQEP58GOg=="], + "@mantine/hooks": ["@mantine/hooks@7.16.3", "", { "peerDependencies": { "react": "^18.x || ^19.x" } }, "sha512-B94FBWk5Sc81tAjV+B3dGh/gKzfqzpzVC/KHyBRWOOyJRqeeRbI/FAaJo4zwppyQo1POSl5ArdyjtDRrRIj2SQ=="], - "@mantine/modals": ["@mantine/modals@7.16.2", "", { "peerDependencies": { "@mantine/core": "7.16.2", "@mantine/hooks": "7.16.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "sha512-REwAV53Fcz021EE3zLyYdkdFlfG+b24y279Y+eA1jCCH9VMLivXL+gacrox4BcpzREsic9nGVInSNv3VJwPlAQ=="], + "@mantine/modals": ["@mantine/modals@7.16.3", "", { "peerDependencies": { "@mantine/core": "7.16.3", "@mantine/hooks": "7.16.3", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "sha512-BJuDzRugK6xLbuFTTo8NLJumVvVmSYsNVcEtmlXOWTE3NkDGktBXGKo8V1B0XfJ9/d/rZw7HCE0p4i76MtA+bQ=="], - "@mantine/notifications": ["@mantine/notifications@7.16.2", "", { "dependencies": { "@mantine/store": "7.16.2", "react-transition-group": "4.4.5" }, "peerDependencies": { 
"@mantine/core": "7.16.2", "@mantine/hooks": "7.16.2", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "sha512-U342XWiiRI1NvOlLsI6PH/pSNe0rxNClJ2w5orvjOMXvaAfDe52mhnzRmtzRxYENp06++3b/G7MjPH+466rF9Q=="], + "@mantine/notifications": ["@mantine/notifications@7.16.3", "", { "dependencies": { "@mantine/store": "7.16.3", "react-transition-group": "4.4.5" }, "peerDependencies": { "@mantine/core": "7.16.3", "@mantine/hooks": "7.16.3", "react": "^18.x || ^19.x", "react-dom": "^18.x || ^19.x" } }, "sha512-wtEME9kSYfXWYmAmQUZ8c+rwNmhdWRBaW1mlPdQsPkzMqkv4q6yy0IpgwcnuHStSG9EHaQBXazmVxMZJdEAWBQ=="], - "@mantine/store": ["@mantine/store@7.16.2", "", { "peerDependencies": { "react": "^18.x || ^19.x" } }, "sha512-9dEGLosrYSePlAwhfx3CxTLcWu2M98TtuYnelAiHEdNEkyafirvZxNt4paMoFXLKR1XPm5wdjDK7bdTaE0t7Og=="], + "@mantine/store": ["@mantine/store@7.16.3", "", { "peerDependencies": { "react": "^18.x || ^19.x" } }, "sha512-6M2M5+0BrRtnVv+PUmr04tY1RjPqyapaHplo90uK1NMhP/1EIqrwTL9KoEtCNCJ5pog1AQtu0bj0QPbqUvxwLg=="], "@rollup/pluginutils": ["@rollup/pluginutils@5.1.4", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", "picomatch": "^4.0.2" }, "peerDependencies": { "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" }, "optionalPeers": ["rollup"] }, "sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ=="], @@ -205,7 +205,7 @@ "@types/jest": ["@types/jest@27.5.2", "", { "dependencies": { "jest-matcher-utils": "^27.0.0", "pretty-format": "^27.0.0" } }, "sha512-mpT8LJJ4CMeeahobofYWIjFo0xonRS/HfxnVEPMPFSQdGUt1uHCnoPT7Zhb+sjDU2wz0oKV0OLUR0WzrHNgfeA=="], - "@types/node": ["@types/node@20.17.16", "", { "dependencies": { "undici-types": "~6.19.2" } }, "sha512-vOTpLduLkZXePLxHiHsBLp98mHGnl8RptV4YAO3HfKO5UHjDvySGbxKtpYfy8Sx5+WKcgc45qNreJJRVM3L6mw=="], + "@types/node": ["@types/node@20.17.17", "", { "dependencies": { "undici-types": "~6.19.2" } }, 
"sha512-/WndGO4kIfMicEQLTi/mDANUu/iVUhT7KboZPdEqqHQ4aTS+3qT3U5gIqWDFV+XouorjfgGqvKILJeHhuQgFYg=="], "@types/prop-types": ["@types/prop-types@15.7.14", "", {}, "sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ=="], diff --git a/frontend/package.json b/frontend/package.json index 8a0cba7..071420d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -5,14 +5,14 @@ "private": true, "dependencies": { "@hello-pangea/dnd": "^16.6.0", - "@mantine/core": "^7.16.2", - "@mantine/form": "^7.16.2", - "@mantine/hooks": "^7.16.2", - "@mantine/modals": "^7.16.2", - "@mantine/notifications": "^7.16.2", + "@mantine/core": "^7.16.3", + "@mantine/form": "^7.16.3", + "@mantine/hooks": "^7.16.3", + "@mantine/modals": "^7.16.3", + "@mantine/notifications": "^7.16.3", "@tanstack/react-query": "^4.36.1", "@types/jest": "^27.5.2", - "@types/node": "^20.17.16", + "@types/node": "^20.17.17", "@types/react": "^18.3.18", "@types/react-dom": "^18.3.5", "buffer": "^6.0.3", diff --git a/frontend/src/components/NFRegex/AddNewService.tsx b/frontend/src/components/NFRegex/AddNewService.tsx index f0dbc17..f88f7a9 100644 --- a/frontend/src/components/NFRegex/AddNewService.tsx +++ b/frontend/src/components/NFRegex/AddNewService.tsx @@ -86,16 +86,16 @@ function AddNewService({ opened, onClose }:{ opened:boolean, onClose:()=>void }) /> - + - - {error?<> - } color="red" onClose={()=>{setError(null)}}> - Error: {error} - :null} + + } color="red" onClose={()=>{setError(null)}}> + Error: {error} + + :null} diff --git a/frontend/src/components/NFRegex/ServiceRow/RenameForm.tsx b/frontend/src/components/NFRegex/ServiceRow/RenameForm.tsx index 412be2c..a643bec 100644 --- a/frontend/src/components/NFRegex/ServiceRow/RenameForm.tsx +++ b/frontend/src/components/NFRegex/ServiceRow/RenameForm.tsx @@ -49,16 +49,16 @@ function RenameForm({ opened, onClose, service }:{ opened:boolean, onClose:()=>v placeholder="Awesome Service Name!" 
{...form.getInputProps('name')} /> - + - - {error?<> - } color="red" onClose={()=>{setError(null)}}> - Error: {error} - :null} + + } color="red" onClose={()=>{setError(null)}}> + Error: {error} + + :null} diff --git a/frontend/src/components/NFRegex/ServiceRow/index.tsx b/frontend/src/components/NFRegex/ServiceRow/index.tsx index 0ec16a2..d8167bc 100644 --- a/frontend/src/components/NFRegex/ServiceRow/index.tsx +++ b/frontend/src/components/NFRegex/ServiceRow/index.tsx @@ -2,7 +2,7 @@ import { ActionIcon, Badge, Box, Divider, Grid, Menu, Space, Title, Tooltip } fr import { useState } from 'react'; import { FaPlay, FaStop } from 'react-icons/fa'; import { nfregex, Service, serviceQueryKey } from '../utils'; -import { MdOutlineArrowForwardIos } from "react-icons/md" +import { MdDoubleArrow, MdOutlineArrowForwardIos } from "react-icons/md" import YesNoModal from '../../YesNoModal'; import { errorNotify, isMediumScreen, okNotify, regex_ipv4 } from '../../../js/utils'; import { BsTrashFill } from 'react-icons/bs'; @@ -10,8 +10,10 @@ import { BiRename } from 'react-icons/bi' import RenameForm from './RenameForm'; import { MenuDropDownWithButton } from '../../MainLayout'; import { useQueryClient } from '@tanstack/react-query'; +import { FaFilter } from "react-icons/fa"; +import { VscRegex } from "react-icons/vsc"; -function ServiceRow({ service, onClick }:{ service:Service, onClick?:()=>void }) { +export default function ServiceRow({ service, onClick }:{ service:Service, onClick?:()=>void }) { let status_color = "gray"; switch(service.status){ @@ -72,36 +74,34 @@ function ServiceRow({ service, onClick }:{ service:Service, onClick?:()=>void }) return <> - - - - - + <Box className="firegex__nfregex__row" style={{width:"100%", flexDirection: isMedium?"row":"column"}}> + <Box> + <Box className="center-flex" style={{ justifyContent: "flex-start" }}> + <MdDoubleArrow size={30} style={{color: "white"}}/> + <Title className="firegex__nfregex__name" ml="xs"> {service.name} - - 
Status: {service.status} - - :{service.port} - - - {isMedium?null:} - + + {service.status} + + :{service.port} + + + {isMedium?null:} + - - - - - + - Connections Blocked: {service.n_packets} - - Regex: {service.n_regex} - {service.ip_int} on {service.proto} + + + {service.n_packets} + + {service.n_regex} + - {isMedium?:} + {isMedium?:} Rename service @@ -129,14 +129,12 @@ function ServiceRow({ service, onClick }:{ service:Service, onClick?:()=>void }) {isMedium?:} - {onClick? + {onClick? :null} - {isMedium?:null} - - - + + void }) /> } - -export default ServiceRow; diff --git a/frontend/src/components/PortHijack/AddNewService.tsx b/frontend/src/components/PortHijack/AddNewService.tsx index b685672..44622be 100644 --- a/frontend/src/components/PortHijack/AddNewService.tsx +++ b/frontend/src/components/PortHijack/AddNewService.tsx @@ -94,16 +94,16 @@ function AddNewService({ opened, onClose }:{ opened:boolean, onClose:()=>void }) /> - + - - {error?<> - } color="red" onClose={()=>{setError(null)}}> - Error: {error} - :null} + + } color="red" onClose={()=>{setError(null)}}> + Error: {error} + + :null} diff --git a/frontend/src/components/PortHijack/ServiceRow/ChangeDestination.tsx b/frontend/src/components/PortHijack/ServiceRow/ChangeDestination.tsx index e4f2bef..8d135e9 100644 --- a/frontend/src/components/PortHijack/ServiceRow/ChangeDestination.tsx +++ b/frontend/src/components/PortHijack/ServiceRow/ChangeDestination.tsx @@ -53,15 +53,16 @@ function ChangeDestination({ opened, onClose, service }:{ opened:boolean, onClos
- + - {error?<> - } color="red" onClose={()=>{setError(null)}}> - Error: {error} - :null} + + } color="red" onClose={()=>{setError(null)}}> + Error: {error} + + :null} diff --git a/frontend/src/components/PortHijack/ServiceRow/RenameForm.tsx b/frontend/src/components/PortHijack/ServiceRow/RenameForm.tsx index 84f9fba..4d75c42 100644 --- a/frontend/src/components/PortHijack/ServiceRow/RenameForm.tsx +++ b/frontend/src/components/PortHijack/ServiceRow/RenameForm.tsx @@ -49,16 +49,16 @@ function RenameForm({ opened, onClose, service }:{ opened:boolean, onClose:()=>v placeholder="Awesome Service Name!" {...form.getInputProps('name')} /> - + - - {error?<> - } color="red" onClose={()=>{setError(null)}}> - Error: {error} - :null} + + } color="red" onClose={()=>{setError(null)}}> + Error: {error} + + :null} diff --git a/frontend/src/components/PortHijack/ServiceRow/index.tsx b/frontend/src/components/PortHijack/ServiceRow/index.tsx index d481f75..2f4a136 100644 --- a/frontend/src/components/PortHijack/ServiceRow/index.tsx +++ b/frontend/src/components/PortHijack/ServiceRow/index.tsx @@ -8,11 +8,11 @@ import { BsArrowRepeat, BsTrashFill } from 'react-icons/bs'; import { BiRename } from 'react-icons/bi' import RenameForm from './RenameForm'; import ChangeDestination from './ChangeDestination'; -import PortInput from '../../PortInput'; import { useForm } from '@mantine/form'; import { MenuDropDownWithButton } from '../../MainLayout'; +import { MdDoubleArrow } from "react-icons/md"; -function ServiceRow({ service }:{ service:Service }) { +export default function ServiceRow({ service }:{ service:Service }) { let status_color = service.active ? 
"teal": "red" @@ -72,40 +72,36 @@ function ServiceRow({ service }:{ service:Service }) { return <> - - - - + <Box className="firegex__nfregex__row" style={{width:"100%", flexDirection: isMedium?"row":"column"}}> + <Box> + <Box className="center-flex" style={{ justifyContent: "flex-start" }}> + <MdDoubleArrow size={30} style={{color: "white"}}/> + <Title className="firegex__nfregex__name" ml="xs"> {service.name} - - Status: {service.active?"ENABLED":"DISABLED"} - - {service.proto} - - - {isMedium?null:} - + + {service.active?"ENABLED":"DISABLED"} + + {service.proto} + + + {isMedium?null:} + - - - - - - + - - FROM {service.ip_src} : {service.public_port} + + FROM {service.ip_src} :{service.public_port} - + - TO {service.ip_dst} : service.proxy_port + TO {service.ip_dst} :{service.proxy_port} - {isMedium?:} + {isMedium?:} Rename service @@ -134,14 +130,10 @@ function ServiceRow({ service }:{ service:Service }) { - - {isMedium?:null} - - - + + - } - -export default ServiceRow; diff --git a/frontend/src/components/RegexView/index.tsx b/frontend/src/components/RegexView/index.tsx index 20b9d18..e5b0821 100644 --- a/frontend/src/components/RegexView/index.tsx +++ b/frontend/src/components/RegexView/index.tsx @@ -1,18 +1,19 @@ -import { Grid, Text, Title, Badge, Space, ActionIcon, Tooltip, Box } from '@mantine/core'; +import { Text, Title, Badge, Space, ActionIcon, Tooltip, Box } from '@mantine/core'; import { useState } from 'react'; import { RegexFilter } from '../../js/models'; -import { b64decode, errorNotify, getapiobject, okNotify } from '../../js/utils'; +import { b64decode, errorNotify, getapiobject, isMediumScreen, okNotify } from '../../js/utils'; import { BsTrashFill } from "react-icons/bs" import YesNoModal from '../YesNoModal'; import { FaPause, FaPlay } from 'react-icons/fa'; import { useClipboard } from '@mantine/hooks'; - +import { FaFilter } from "react-icons/fa"; +import { VscRegex } from "react-icons/vsc"; function RegexView({ regexInfo }:{ 
regexInfo:RegexFilter }) { const mode_string = regexInfo.mode === "C"? "C -> S": regexInfo.mode === "S"? "S -> C": - regexInfo.mode === "B"? "S <-> C": "🤔" + regexInfo.mode === "B"? "C <-> S": "🤔" let regex_expr = b64decode(regexInfo.regex); @@ -20,6 +21,7 @@ function RegexView({ regexInfo }:{ regexInfo:RegexFilter }) { const [deleteTooltipOpened, setDeleteTooltipOpened] = useState(false); const [statusTooltipOpened, setStatusTooltipOpened] = useState(false); const clipboard = useClipboard({ timeout: 500 }); + const isMedium = isMediumScreen(); const deleteRegex = () => { getapiobject().regexdelete(regexInfo.id).then(res => { @@ -42,57 +44,39 @@ function RegexView({ regexInfo }:{ regexInfo:RegexFilter }) { } return - - - Regex: - - + + { clipboard.copy(regex_expr) okNotify("Regex copied to clipboard!",`The regex '${regex_expr}' has been copied to the clipboard!`) }}>{regex_expr} - - setStatusTooltipOpened(false)} onBlur={() => setStatusTooltipOpened(false)} - onMouseEnter={() => setStatusTooltipOpened(true)} onMouseLeave={() => setStatusTooltipOpened(false)} + onFocus={() => setStatusTooltipOpened(false)} onBlur={() => setStatusTooltipOpened(false)} + onMouseEnter={() => setStatusTooltipOpened(true)} onMouseLeave={() => setStatusTooltipOpened(false)} >{regexInfo.active?:} setDeleteModal(true)} size="xl" radius="md" variant="filled" - onFocus={() => setDeleteTooltipOpened(false)} onBlur={() => setDeleteTooltipOpened(false)} - onMouseEnter={() => setDeleteTooltipOpened(true)} onMouseLeave={() => setDeleteTooltipOpened(false)} + onFocus={() => setDeleteTooltipOpened(false)} onBlur={() => setDeleteTooltipOpened(false)} + onMouseEnter={() => setDeleteTooltipOpened(true)} onMouseLeave={() => setDeleteTooltipOpened(false)} > - - - - - - - Service: {regexInfo.service_id} - - {regexInfo.active?"ACTIVE":"DISABLED"} - - ID: {regexInfo.id} - - - - - - Case: {regexInfo.is_case_sensitive?"SENSIIVE":"INSENSITIVE"} - - Packets filtered: {regexInfo.n_packets} - - Mode: 
{mode_string} - - - + + + {regexInfo.n_packets} + + {regexInfo.active?"ACTIVE":"DISABLED"} + + {regexInfo.is_case_sensitive?"Strict":"Loose"} + + {mode_string} + + {description} - + + + + {error?<> + + } color="red" onClose={()=>{setError(null)}}> + Error: {error} + + :null} + + + + +} + +export default AddEditService; diff --git a/frontend/src/components/NFRegex/AddNewService.tsx b/frontend/src/components/NFRegex/AddNewService.tsx deleted file mode 100644 index f88f7a9..0000000 --- a/frontend/src/components/NFRegex/AddNewService.tsx +++ /dev/null @@ -1,105 +0,0 @@ -import { Button, Group, Space, TextInput, Notification, Modal, Switch, SegmentedControl, Box } from '@mantine/core'; -import { useForm } from '@mantine/form'; -import { useState } from 'react'; -import { okNotify, regex_ipv4, regex_ipv6 } from '../../js/utils'; -import { ImCross } from "react-icons/im" -import { nfregex } from './utils'; -import PortAndInterface from '../PortAndInterface'; - -type ServiceAddForm = { - name:string, - port:number, - proto:string, - ip_int:string, - autostart: boolean, -} - -function AddNewService({ opened, onClose }:{ opened:boolean, onClose:()=>void }) { - - const form = useForm({ - initialValues: { - name:"", - port:8080, - ip_int:"", - proto:"tcp", - autostart: true - }, - validate:{ - name: (value) => value !== "" ? null : "Service name is required", - port: (value) => (value>0 && value<65536) ? null : "Invalid port", - proto: (value) => ["tcp","udp"].includes(value) ? null : "Invalid protocol", - ip_int: (value) => (value.match(regex_ipv6) || value.match(regex_ipv4)) ? 
null : "Invalid IP address", - } - }) - - const close = () =>{ - onClose() - form.reset() - setError(null) - } - - const [submitLoading, setSubmitLoading] = useState(false) - const [error, setError] = useState(null) - - const submitRequest = ({ name, port, autostart, proto, ip_int }:ServiceAddForm) =>{ - setSubmitLoading(true) - nfregex.servicesadd({name, port, proto, ip_int }).then( res => { - if (res.status === "ok" && res.service_id){ - setSubmitLoading(false) - close(); - if (autostart) nfregex.servicestart(res.service_id) - okNotify(`Service ${name} has been added`, `Successfully added service with port ${port}`) - }else{ - setSubmitLoading(false) - setError("Invalid request! [ "+res.status+" ]") - } - }).catch( err => { - setSubmitLoading(false) - setError("Request Failed! [ "+err+" ]") - }) - } - - - return -
- - - - - - - - - - - - - - - - {error?<> - - } color="red" onClose={()=>{setError(null)}}> - Error: {error} - - :null} - - -
- -} - -export default AddNewService; diff --git a/frontend/src/components/NFRegex/ServiceRow/index.tsx b/frontend/src/components/NFRegex/ServiceRow/index.tsx index d8167bc..53a5c1b 100644 --- a/frontend/src/components/NFRegex/ServiceRow/index.tsx +++ b/frontend/src/components/NFRegex/ServiceRow/index.tsx @@ -12,6 +12,8 @@ import { MenuDropDownWithButton } from '../../MainLayout'; import { useQueryClient } from '@tanstack/react-query'; import { FaFilter } from "react-icons/fa"; import { VscRegex } from "react-icons/vsc"; +import { IoSettingsSharp } from 'react-icons/io5'; +import AddEditService from '../AddEditService'; export default function ServiceRow({ service, onClick }:{ service:Service, onClick?:()=>void }) { @@ -26,6 +28,7 @@ export default function ServiceRow({ service, onClick }:{ service:Service, onCli const [tooltipStopOpened, setTooltipStopOpened] = useState(false); const [deleteModal, setDeleteModal] = useState(false) const [renameModal, setRenameModal] = useState(false) + const [editModal, setEditModal] = useState(false) const isMedium = isMediumScreen() const stopService = async () => { @@ -104,12 +107,13 @@ export default function ServiceRow({ service, onClick }:{ service:Service, onCli {isMedium?:} - Rename service + Edit service + } onClick={()=>setEditModal(true)}>Service Settings } onClick={()=>setRenameModal(true)}>Change service name Danger zone } onClick={()=>setDeleteModal(true)}>Delete Service - + + setEditModal(false)} + edit={service} + /> } diff --git a/frontend/src/components/NFRegex/utils.ts b/frontend/src/components/NFRegex/utils.ts index 1c34bc3..faa67c4 100644 --- a/frontend/src/components/NFRegex/utils.ts +++ b/frontend/src/components/NFRegex/utils.ts @@ -12,6 +12,7 @@ export type Service = { ip_int: string, n_packets:number, n_regex:number, + fail_open:boolean, } export type ServiceAddForm = { @@ -19,6 +20,14 @@ export type ServiceAddForm = { port:number, proto:string, ip_int:string, + fail_open: boolean, +} + +export type 
ServiceSettings = { + port?:number, + proto?:string, + ip_int?:string, + fail_open?: boolean, } export type ServiceAddResponse = { @@ -79,5 +88,9 @@ export const nfregex = { }, serviceregexes: async (service_id:string) => { return await getapi(`nfregex/services/${service_id}/regexes`) as RegexFilter[]; - } + }, + settings: async (service_id:string, data:ServiceSettings) => { + const { status } = await putapi(`nfregex/services/${service_id}/settings`,data) as ServerResponse; + return status === "ok"?undefined:status + }, } \ No newline at end of file diff --git a/frontend/src/pages/NFRegex/ServiceDetails.tsx b/frontend/src/pages/NFRegex/ServiceDetails.tsx index 176a83d..bb6255f 100644 --- a/frontend/src/pages/NFRegex/ServiceDetails.tsx +++ b/frontend/src/pages/NFRegex/ServiceDetails.tsx @@ -3,7 +3,7 @@ import { Navigate, useNavigate, useParams } from 'react-router-dom'; import RegexView from '../../components/RegexView'; import AddNewRegex from '../../components/AddNewRegex'; import { BsPlusLg } from "react-icons/bs"; -import { nfregexServiceQuery, nfregexServiceRegexesQuery } from '../../components/NFRegex/utils'; +import { nfregexServiceQuery, nfregexServiceRegexesQuery, Service } from '../../components/NFRegex/utils'; import { Badge, Divider, Menu } from '@mantine/core'; import { useState } from 'react'; import { FaFilter, FaPlay, FaStop } from 'react-icons/fa'; @@ -18,6 +18,8 @@ import { MenuDropDownWithButton } from '../../components/MainLayout'; import { useQueryClient } from '@tanstack/react-query'; import { FaArrowLeft } from "react-icons/fa"; import { VscRegex } from 'react-icons/vsc'; +import { IoSettingsSharp } from 'react-icons/io5'; +import AddEditService from '../../components/NFRegex/AddEditService'; export default function ServiceDetailsNFRegex() { @@ -29,6 +31,7 @@ export default function ServiceDetailsNFRegex() { const regexesList = nfregexServiceRegexesQuery(srv??"") const [deleteModal, setDeleteModal] = useState(false) const [renameModal, 
setRenameModal] = useState(false) + const [editModal, setEditModal] = useState(false) const [buttonLoading, setButtonLoading] = useState(false) const queryClient = useQueryClient() const [tooltipStopOpened, setTooltipStopOpened] = useState(false); @@ -108,7 +111,8 @@ export default function ServiceDetailsNFRegex() { - Rename service + Edit service + } onClick={()=>setEditModal(true)}>Service Settings } onClick={()=>setRenameModal(true)}>Change service name Danger zone @@ -190,5 +194,10 @@ export default function ServiceDetailsNFRegex() { opened={renameModal} service={serviceInfo} /> + setEditModal(false)} + edit={serviceInfo} + /> } diff --git a/frontend/src/pages/NFRegex/index.tsx b/frontend/src/pages/NFRegex/index.tsx index 8ad5445..6153458 100644 --- a/frontend/src/pages/NFRegex/index.tsx +++ b/frontend/src/pages/NFRegex/index.tsx @@ -5,7 +5,7 @@ import { useNavigate, useParams } from 'react-router-dom'; import ServiceRow from '../../components/NFRegex/ServiceRow'; import { nfregexServiceQuery } from '../../components/NFRegex/utils'; import { errorNotify, getErrorMessage, isMediumScreen } from '../../js/utils'; -import AddNewService from '../../components/NFRegex/AddNewService'; +import AddEditService from '../../components/NFRegex/AddEditService'; import AddNewRegex from '../../components/AddNewRegex'; import { useQueryClient } from '@tanstack/react-query'; import { TbReload } from 'react-icons/tb'; @@ -81,13 +81,12 @@ function NFRegex({ children }: { children: any }) { } - } {srv?children:null} {srv? : - + } }