From 9028342910e9edb0e45510062a46ef45a5823dab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophile=20Diot?=
Date: Fri, 24 Jan 2025 08:54:27 +0100
Subject: [PATCH 1/7] [#1930] Refactor reverse proxy configuration to generate
 the locations configuration in the same order as defined by the user

---
 .../confs/server-http/reverse-proxy.conf | 20 ++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)

diff --git a/src/common/core/reverseproxy/confs/server-http/reverse-proxy.conf b/src/common/core/reverseproxy/confs/server-http/reverse-proxy.conf
index adc24f990..9b41c07d7 100644
--- a/src/common/core/reverseproxy/confs/server-http/reverse-proxy.conf
+++ b/src/common/core/reverseproxy/confs/server-http/reverse-proxy.conf
@@ -30,10 +30,18 @@ add_header X-Proxy-Cache $upstream_cache_status;
 {% endif %}
 {% endif %}
 
-{% set counter = namespace(value=1) %}
+{% set hosts = namespace() %}
+{% set hosts.list = [] %}
 {% for k, v in all.items() %}
-	{% if k.startswith("REVERSE_PROXY_HOST") and v != "" -%}
-		{% set host = v %}
+	{% if k.startswith("REVERSE_PROXY_HOST") and v != "" %}
+		{% set suffix = k[18:] %}
+		{% if not suffix or suffix.startswith("_") %}
+			{% set index = suffix[1:] if suffix else "0" %}
+			{% set hosts.list = hosts.list + [(index|int, k, v)] %}
+		{% endif %}
+	{% endif %}
+{% endfor %}
+{% for index, k, host in hosts.list|sort %}
 	{% set url = all[k.replace("HOST", "URL")] if k.replace("HOST", "URL") in all else "/" %}
 	{% set ws = all[k.replace("HOST", "WS")] if k.replace("HOST", "WS") in all else "" %}
 	{% set headers = all[k.replace("HOST", "HEADERS")] if k.replace("HOST", "HEADERS") in all else "" %}
@@ -50,8 +58,8 @@
 	{% set pass_request_body = all[k.replace("HOST", "PASS_REQUEST_BODY")] if k.replace("HOST", "PASS_REQUEST_BODY") in all else "" %}
 location {{ url }} {
 	etag off;
-	set $backend{{ counter.value }} "{{ host }}";
-	proxy_pass $backend{{ counter.value }};
+	set $backend{{ index }} "{{ host }}";
+	proxy_pass $backend{{ index }};
 	proxy_set_header Host {% if REVERSE_PROXY_CUSTOM_HOST != "" %}"{{ REVERSE_PROXY_CUSTOM_HOST }}"{% else %}$host{% endif %};
 	proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
 	proxy_set_header X-Real-IP $remote_addr;
@@ -104,7 +112,5 @@ location {{ url }} {
 	{% endif %}
 }
 
-	{% set counter.value = counter.value + 1 %}
-	{% endif %}
 {% endfor %}
 {%- endif %}
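
Note on PATCH 1: the template now collects every REVERSE_PROXY_HOST* setting,
derives a numeric index from the suffix (bare setting -> 0, "_N" suffix -> N),
and emits the location blocks in ascending index order. A minimal Python model
of that ordering, for illustration only (it uses isdigit() where Jinja's |int
filter would silently coerce a non-numeric suffix to 0):

    PREFIX = "REVERSE_PROXY_HOST"

    def ordered_hosts(settings: dict) -> list:
        """Mirror of the template's collect/sort pass (illustrative only)."""
        hosts = []
        for k, v in settings.items():
            if k.startswith(PREFIX) and v != "":
                suffix = k[len(PREFIX):]  # the template uses k[18:], same thing
                if not suffix or suffix.startswith("_"):
                    index = suffix[1:] if suffix else "0"
                    if index.isdigit():
                        hosts.append((int(index), k, v))
        return sorted(hosts)  # tuples sort by the numeric index first

    print(ordered_hosts({
        "REVERSE_PROXY_HOST_10": "http://app10:8000",
        "REVERSE_PROXY_HOST": "http://app0:8000",
        "REVERSE_PROXY_HOST_2": "http://app2:8000",
    }))
    # -> [(0, 'REVERSE_PROXY_HOST', ...), (2, 'REVERSE_PROXY_HOST_2', ...),
    #     (10, 'REVERSE_PROXY_HOST_10', ...)]
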
From cc1aadf280e330bea9ee9c88fe4e17f611654f85 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 24 Jan 2025 08:49:52 +0000
Subject: [PATCH 2/7] deps/gha: bump github/codeql-action from 3.28.3 to 3.28.4

Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.28.3 to 3.28.4.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/github/codeql-action/compare/dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7...ee117c905ab18f32fa0f66c2fe40ecc8013f3e04)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
---
 .github/workflows/codeql.yml              | 4 ++--
 .github/workflows/scorecards-analysis.yml | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 23a0da5c7..a25fa8f52 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -35,12 +35,12 @@ jobs:
           python -m pip install --no-cache-dir --require-hashes -r src/common/db/requirements.txt
           echo "CODEQL_PYTHON=$(which python)" >> $GITHUB_ENV
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7 # v3.28.3
+        uses: github/codeql-action/init@ee117c905ab18f32fa0f66c2fe40ecc8013f3e04 # v3.28.4
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql.yml
           setup-python-dependencies: false
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7 # v3.28.3
+        uses: github/codeql-action/analyze@ee117c905ab18f32fa0f66c2fe40ecc8013f3e04 # v3.28.4
         with:
           category: "/language:${{matrix.language}}"

diff --git a/.github/workflows/scorecards-analysis.yml b/.github/workflows/scorecards-analysis.yml
index 6e9a64026..33d00d1e7 100644
--- a/.github/workflows/scorecards-analysis.yml
+++ b/.github/workflows/scorecards-analysis.yml
@@ -25,6 +25,6 @@ jobs:
           results_format: sarif
           publish_results: true
       - name: "Upload SARIF results to code scanning"
-        uses: github/codeql-action/upload-sarif@dd196fa9ce80b6bacc74ca1c32bd5b0ba22efca7 # v3.28.3
+        uses: github/codeql-action/upload-sarif@ee117c905ab18f32fa0f66c2fe40ecc8013f3e04 # v3.28.4
         with:
           sarif_file: results.sarif

From aff4aad30f4221fa854aa3746948963e262c7a35 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophile=20Diot?=
Date: Fri, 24 Jan 2025 10:41:25 +0100
Subject: [PATCH 3/7] Add bunkernet-send job to send reports periodically and
 improve logging

---
 src/common/core/bunkernet/bunkernet.lua       | 144 ++++++------------
 .../core/bunkernet/jobs/bunkernet-data.py     |   6 +-
 .../core/bunkernet/jobs/bunkernet-register.py |   2 +-
 .../core/bunkernet/jobs/bunkernet-send.py     | 143 +++++++++++++++++
 src/common/core/bunkernet/jobs/bunkernet.py   |  12 +-
 src/common/core/bunkernet/plugin.json         |   6 +
 6 files changed, 209 insertions(+), 104 deletions(-)
 create mode 100644 src/common/core/bunkernet/jobs/bunkernet-send.py

diff --git a/src/common/core/bunkernet/bunkernet.lua b/src/common/core/bunkernet/bunkernet.lua
index 754826b69..ad7635123 100644
--- a/src/common/core/bunkernet/bunkernet.lua
+++ b/src/common/core/bunkernet/bunkernet.lua
@@ -9,13 +9,9 @@ local bunkernet = class("bunkernet", plugin)
 local ngx = ngx
 local ERR = ngx.ERR
 local NOTICE = ngx.NOTICE
-local WARN = ngx.WARN
-local INFO = ngx.INFO
 local HTTP_INTERNAL_SERVER_ERROR = ngx.HTTP_INTERNAL_SERVER_ERROR
 local HTTP_OK = ngx.HTTP_OK
-local timer_at = ngx.timer.at
 local get_phase = ngx.get_phase
-local worker = ngx.worker
 local get_version = utils.get_version
 local get_integration = utils.get_integration
 local get_deny_status = utils.get_deny_status
@@ -40,7 +36,7 @@ function bunkernet:initialize(ctx)
 	plugin.initialize(self, "bunkernet", ctx)
 	-- Get BunkerNet ID and save info
 	if get_phase() ~= "init" and self:is_needed() then
-		local id, err = self.datastore:get("plugin_bunkernet_id", true)
+		local id, _ = self.datastore:get("plugin_bunkernet_id", true)
 		if id then
 			self.bunkernet_id = id
 			self.version = get_version(self.ctx)
@@ -174,7 +170,6 @@
 end
 
 function bunkernet:log(bypass_checks)
-
 	if not bypass_checks then
 		-- Check if needed
 		if not self:is_needed() then
@@ -210,21 +205,23 @@ function bunkernet:log(bypass_checks)
 	local report = {
 		["ip"] = self.ctx.bw.remote_addr,
 		["reason"] = reason,
-		["reason_data"] = reason_data,
+		["data"] = reason_data,
 		["method"] = self.ctx.bw.request_method,
 		["url"] = self.ctx.bw.request_uri,
 		["headers"] = ngx.req.get_headers(),
-		["server_name"] = self.ctx.bw.server_name
+		["server_name"] = self.ctx.bw.server_name,
+		["date"] = os.date("!%Y-%m-%dT%H:%M:%SZ", ngx.time()),
 	}
 	ret, err = self.datastore.dict:rpush("plugin_bunkernet_reports", encode(report))
 	if not ret then
 		return self:ret(false, "can't set IP report into datastore : " .. err)
 	end
 	-- Store in recent reports
-	ret, err = self.datastore:set("plugin_bunkernet_" .. self.ctx.bw.remote_addr .. "_" .. reason, "added", 3600)
+	ret, err = self.datastore:set("plugin_bunkernet_" .. self.ctx.bw.remote_addr .. "_" .. reason, "added", 5400)
 	if not ret then
 		return self:ret(false, "can't set IP added into datastore : " .. err)
 	end
+	return self:ret(true, "IP added to reports")
 	end
 	return self:ret(true, "IP already added to reports recently")
 end
@@ -251,81 +248,6 @@ function bunkernet:log_stream()
 	return self:log()
 end
 
-function bunkernet:timer()
-
-	-- Only execute on worker 0
-	if worker.id() ~= 0 then
-		return self:ret(true, "skipped")
-	end
-
-	-- Check if BunkerNet is activated
-	local is_needed, err = has_variable("USE_BUNKERNET", "yes")
-	if is_needed == nil then
-		return self:ret(false, "can't check USE_BUNKERNET variable : " .. err)
-	end
-	if not is_needed then
-		return self:ret(true, "no service uses BunkerNet, skipping init")
-	end
-
-	local ret = true
-	local ret_err = "success"
-
-	-- Get reports list length
-	local len, len_err = self.datastore:llen("plugin_bunkernet_reports")
-	if len == nil then
-		return self:ret(false, "can't get list length : " .. len_err)
-	end
-
-	-- Loop on reports
-	local reports = {}
-	for i = 1, len do
-		-- Pop the report and decode it
-		local report, report_err = self.datastore:lpop("plugin_bunkernet_reports")
-		if not report then
-			self.logger:log(ERR, "can't lpop report : " .. report_err)
-		else
-			table_insert(reports, decode(report))
-		end
-	end
-
-	-- Send reports
-	local keep_reports = {}
-	local send = true
-	for i = 1, #reports do
-		if send then
-			local report = reports[i]
-			local ok, err, status, _ = self:report(report["ip"], report["reason"], report["reason_data"], report["method"], report["url"], report["headers"], report["server_name"])
-			if status == 429 then
-				table_insert(keep_reports, report)
-				ret = false
-				ret_err = "bunkernet API is rate limiting us"
-				send = false
-			elseif not ok then
-				table_insert(keep_reports, report)
-				ret = false
-				ret_err = "can't report IP : " .. err
-				send = false
-			end
-		else
-			table_insert(keep_reports, report)
-		end
-	end
-
-	-- Push unset reports
-	for i = 1, #keep_reports do
-		local set_ok, set_err = self.datastore.dict:rpush("plugin_bunkernet_reports", encode(keep_reports[i]))
-		if not set_ok then
-			ret = false
-			ret_err = set_err
-		end
-	end
-
-	-- Show stats at INFO level
-	self.logger:log(INFO, "processed " .. tostring(#reports) .. " reports : " .. tostring(#reports - #keep_reports) .. " sent and " .. tostring(#keep_reports) .. " remaining")
-
-	return self:ret(ret, ret_err)
-end
-
 function bunkernet:request(method, url, data)
 	local httpc, err = http_new()
 	if not httpc then
@@ -420,7 +342,9 @@ end
 
 function bunkernet:api()
 	-- Match request
-	if not match(self.ctx.bw.uri, "^/bunkernet/ping$") or self.ctx.bw.request_method ~= "POST" then
+	local is_ping = match(self.ctx.bw.uri, "^/bunkernet/ping$") and self.ctx.bw.request_method == "POST"
+	local is_reports = match(self.ctx.bw.uri, "^/bunkernet/reports$") and self.ctx.bw.request_method == "GET"
+	if not (is_ping or is_reports) then
 		return self:ret(false, "success")
 	end
 	-- Check id
@@ -430,22 +354,48 @@ function bunkernet:api()
 	elseif not id then
 		return self:ret(true, "missing instance ID", HTTP_INTERNAL_SERVER_ERROR)
 	end
-	self.bunkernet_id = id
-	self.version = get_version(self.ctx)
-	self.integration = get_integration(self.ctx)
-	-- Send ping request
-	local ok, err, status, _ = self:ping()
-	if not ok then
-		return self:ret(true, "error while sending request to API : " .. err, HTTP_INTERNAL_SERVER_ERROR)
-	end
-	if status ~= 200 then
+
+	if match(self.ctx.bw.uri, "^/bunkernet/ping$") then
+		self.bunkernet_id = id
+		self.version = get_version(self.ctx)
+		self.integration = get_integration(self.ctx)
+		-- Send ping request
+		local ok, err, status, _ = self:ping()
+		if not ok then
+			return self:ret(true, "error while sending request to API : " .. err, HTTP_INTERNAL_SERVER_ERROR)
+		end
+		if status ~= 200 then
+			return self:ret(
+				true,
+				"received status " .. tostring(status) .. " from API using instance ID " .. self.bunkernet_id,
+				HTTP_INTERNAL_SERVER_ERROR
+			)
+		end
 		return self:ret(
 			true,
-			"received status " .. tostring(status) .. " from API using instance ID " .. self.bunkernet_id,
-			HTTP_INTERNAL_SERVER_ERROR
+			"connectivity with API using instance ID " .. self.bunkernet_id .. " is successful",
+			HTTP_OK
 		)
+	elseif match(self.ctx.bw.uri, "^/bunkernet/reports$") then
+		-- Get reports list length
+		local len, len_err = self.datastore:llen("plugin_bunkernet_reports")
+		if len == nil then
+			return self:ret(true, "can't get list length : " .. len_err, HTTP_INTERNAL_SERVER_ERROR)
+		end
+		-- Loop on reports
+		local reports = {}
+		for _ = 1, len do
+			-- Pop the report and decode it
+			local report, report_err = self.datastore:lpop("plugin_bunkernet_reports")
+			if not report then
+				self.logger:log(ERR, "can't lpop report : " .. report_err)
+			else
+				table_insert(reports, decode(report))
+			end
+		end
+		-- Return reports
+		return self:ret(true, reports, HTTP_OK)
 	end
-	return self:ret(true, "connectivity with API using instance ID " .. self.bunkernet_id .. " is successful", HTTP_OK)
 end
 
 return bunkernet

diff --git a/src/common/core/bunkernet/jobs/bunkernet-data.py b/src/common/core/bunkernet/jobs/bunkernet-data.py
index d4d64310b..3ad847eb9 100644
--- a/src/common/core/bunkernet/jobs/bunkernet-data.py
+++ b/src/common/core/bunkernet/jobs/bunkernet-data.py
@@ -14,7 +14,7 @@
 from jobs import Job # type: ignore
 from common_utils import bytes_hash # type: ignore
 
-LOGGER = setup_logger("BUNKERNET")
+LOGGER = setup_logger("BUNKERNET.data")
 exit_status = 0
 
 try:
@@ -42,8 +42,6 @@
 
     # Create empty file in case it doesn't exist
     ip_list_path = bunkernet_path.joinpath("ip.list")
-    if not ip_list_path.is_file():
-        ip_list_path.touch(exist_ok=True)
 
     # Get ID from cache
     bunkernet_id = None
@@ -76,7 +74,7 @@
         LOGGER.warning("BunkerNet API is rate limiting us, trying again later...")
         sys_exit(0)
     elif status == 403:
-        LOGGER.warning("BunkerNet has banned this instance, retrying a register later...")
+        LOGGER.warning("BunkerNet has banned this instance, retrying to download data later...")
         sys_exit(0)
 
     try:

diff --git a/src/common/core/bunkernet/jobs/bunkernet-register.py b/src/common/core/bunkernet/jobs/bunkernet-register.py
index ed5f57be7..c5823bcff 100644
--- a/src/common/core/bunkernet/jobs/bunkernet-register.py
+++ b/src/common/core/bunkernet/jobs/bunkernet-register.py
@@ -12,7 +12,7 @@
 from logger import setup_logger # type: ignore
 from jobs import Job # type: ignore
 
-LOGGER = setup_logger("BUNKERNET")
+LOGGER = setup_logger("BUNKERNET.register")
 exit_status = 0
 
 try:

diff --git a/src/common/core/bunkernet/jobs/bunkernet-send.py b/src/common/core/bunkernet/jobs/bunkernet-send.py
new file mode 100644
index 000000000..bbb40f5f4
--- /dev/null
+++ b/src/common/core/bunkernet/jobs/bunkernet-send.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+
+from datetime import datetime, timedelta
+from itertools import chain
+from json import dumps, loads
+from os import getenv, sep
+from os.path import join
+from pathlib import Path
+from sys import exit as sys_exit, path as sys_path
+from time import sleep
+
+for deps_path in [join(sep, "usr", "share", "bunkerweb", *paths) for paths in (("deps", "python"), ("utils",), ("api",), ("db",))]:
+    if deps_path not in sys_path:
+        sys_path.append(deps_path)
+
+from bunkernet import send_reports
+from heapq import merge
+
+from API import API # type: ignore
+from ApiCaller import ApiCaller # type: ignore
+from logger import setup_logger # type: ignore
+from jobs import Job # type: ignore
+
+LOGGER = setup_logger("BUNKERNET.send")
+exit_status = 0
+
+BATCH_SIZE = 100
+
+try:
+    # Check if at least one server has BunkerNet activated
+    bunkernet_activated = False
+    # Multisite case
+    if getenv("MULTISITE", "no") == "yes":
+        for first_server in getenv("SERVER_NAME", "").split(" "):
+            if getenv(f"{first_server}_USE_BUNKERNET", getenv("USE_BUNKERNET", "yes")) == "yes":
+                bunkernet_activated = True
+                break
+    # Singlesite case
+    elif getenv("USE_BUNKERNET", "yes") == "yes":
+        bunkernet_activated = True
+
+    if not bunkernet_activated:
+        LOGGER.info("BunkerNet is not activated, skipping send...")
+        sys_exit(0)
+
+    # Create directory if it doesn't exist
+    bunkernet_path = Path(sep, "var", "cache", "bunkerweb", "bunkernet")
+    bunkernet_path.mkdir(parents=True, exist_ok=True)
+
+    JOB = Job(LOGGER, __file__)
+
+    db_metadata = JOB.db.get_metadata()
+
+    if isinstance(db_metadata, str) or db_metadata["scheduler_first_start"]:
+        LOGGER.info("First start of the scheduler, skipping send...")
+        sys_exit(0)
+
+    # Get ID from cache
+    bunkernet_id = None
+    bunkernet_id = JOB.get_cache("instance.id")
+    if bunkernet_id:
+        bunkernet_path.joinpath("instance.id").write_bytes(bunkernet_id)
+        LOGGER.info("Successfully retrieved BunkerNet ID from db cache")
+    else:
+        LOGGER.info("No BunkerNet ID found in db cache")
+
+    # Check if ID is present
+    if not bunkernet_path.joinpath("instance.id").is_file():
+        LOGGER.error("Not sending BunkerNet data because instance is not registered")
+        sys_exit(2)
+
+    # Create API instances for each database instance
+    apis = [API(f"http://{instance['hostname']}:{instance['port']}", instance["server_name"]) for instance in JOB.db.get_instances()]
+
+    apiCaller = ApiCaller(apis)
+
+    # Get reports from all instances
+    resp, instances_data = apiCaller.send_to_apis("GET", "/bunkernet/reports", response=True)
+
+    instance_reports = []
+    if resp:
+        # Extract and flatten the reports from each instance using chain
+        instance_reports = list(chain.from_iterable((data.get("msg", []) for data in instances_data.values() if data.get("status", "ko") == "success")))
+
+    cached_data = loads(JOB.get_cache("reports.json") or "{}")
+
+    if not cached_data:
+        cached_data = {"created": datetime.now().astimezone().isoformat(), "reports": []}
+
+    # Merge reports and sort by the oldest first using heapq merge
+    reports = list(merge(cached_data.get("reports", []), instance_reports, key=lambda x: datetime.fromisoformat(x["date"])))
+
+    # Check if forced send is needed due to time
+    force_send = datetime.fromisoformat(cached_data["created"]) + timedelta(hours=24) < datetime.now().astimezone()
+
+    # Process reports in batches of 100
+    remaining = len(reports)
+    while force_send or remaining >= BATCH_SIZE:
+        force_send = False
+
+        batch, reports = reports[:BATCH_SIZE], reports[BATCH_SIZE:]
+
+        LOGGER.info(f"Sending {len(batch)} / {remaining} reports to BunkerNet API ...")
+        ok, status, data = send_reports(batch)
+
+        if not ok or status in (429, 403):
+            reports = batch + reports  # Add batch back to reports
+            remaining = len(reports)
+
+            if not ok:
+                LOGGER.error(f"Error while sending data to BunkerNet API: {data}")
+            elif status == 429:
+                LOGGER.warning("BunkerNet API rate limit reached, will retry later")
+            else:  # status == 403
+                LOGGER.warning("BunkerNet instance banned, will retry later")
+            break
+
+        remaining = len(reports)
+
+        if remaining >= BATCH_SIZE:
+            LOGGER.info("Sleeping 2 seconds before next batch...")
+            sleep(2)
+
+    if reports:
+        LOGGER.info(f"Caching {remaining} reports...")
+        cached_data["reports"] = reports
+
+        # Cache the remaining reports
+        cached, err = JOB.cache_file("reports.json", dumps(cached_data, indent=2).encode())
+        if not cached:
+            LOGGER.error(f"Failed to cache reports.json :\n{err}")
+            exit_status = 2
+    else:
+        deleted, err = JOB.del_cache("reports.json")
+        if not deleted:
+            LOGGER.warning(f"Couldn't delete reports.json from cache : {err}")
+except SystemExit as e:
+    exit_status = e.code
+except BaseException as e:
+    exit_status = 2
+    LOGGER.error(f"Exception while running bunkernet-send.py :\n{e}")
+
+sys_exit(exit_status)
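
Note on PATCH 3: bunkernet-send.py merges the reports cached from a previous
run with the ones freshly fetched from the instances, oldest first, before
batching. heapq.merge only yields a globally sorted stream when each input is
already sorted by the key, which holds here since both lists are appended in
chronological order. A small demonstration with made-up data:

    from datetime import datetime
    from heapq import merge

    cached = [
        {"ip": "192.0.2.1", "date": "2025-01-24T08:00:00+00:00"},
        {"ip": "192.0.2.2", "date": "2025-01-24T10:00:00+00:00"},
    ]
    fetched = [
        {"ip": "198.51.100.7", "date": "2025-01-24T09:00:00+00:00"},
    ]

    # Lazily interleave the two date-sorted lists into one date-sorted list
    reports = list(merge(cached, fetched, key=lambda x: datetime.fromisoformat(x["date"])))
    print([r["ip"] for r in reports])
    # -> ['192.0.2.1', '198.51.100.7', '192.0.2.2']

One caveat worth noting: datetime.fromisoformat only accepts the trailing "Z"
written by the Lua side (os.date("!%Y-%m-%dT%H:%M:%SZ", ...)) on Python 3.11
and later.
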
diff --git a/src/common/core/bunkernet/jobs/bunkernet.py b/src/common/core/bunkernet/jobs/bunkernet.py
index 96fccd7d6..d548517c8 100644
--- a/src/common/core/bunkernet/jobs/bunkernet.py
+++ b/src/common/core/bunkernet/jobs/bunkernet.py
@@ -3,12 +3,14 @@
 from os import getenv, sep
 from pathlib import Path
 from requests import request as requests_request, ReadTimeout
-from typing import Literal, Optional, Tuple, Union
+from typing import Any, Dict, List, Literal, Optional, Tuple, Union
 
 from common_utils import get_os_info, get_integration, get_version # type: ignore
 
 
-def request(method: Literal["POST", "GET"], url: str, _id: Optional[str] = None) -> Tuple[bool, Optional[int], Union[str, dict]]:
+def request(
+    method: Literal["POST", "GET"], url: str, _id: Optional[str] = None, *, additional_data: Optional[Dict[str, Any]] = None
+) -> Tuple[bool, Optional[int], Union[str, dict]]:
     data = {
         "integration": get_integration(),
         "version": get_version(),
@@ -16,6 +18,8 @@
     }
     if _id:
         data["id"] = _id
+    if additional_data:
+        data.update(additional_data)
 
     try:
         resp = requests_request(
@@ -54,5 +58,9 @@ def data() -> Tuple[bool, Optional[int], Union[str, dict]]:
     return request("GET", "/db", _id=get_id())
 
 
+def send_reports(reports: List[Dict[str, Any]]) -> Tuple[bool, Optional[int], Union[str, dict]]:
+    return request("POST", "/report", _id=get_id(), additional_data={"reports": reports})
+
+
 def get_id() -> str:
     return Path(sep, "var", "cache", "bunkerweb", "bunkernet", "instance.id").read_text(encoding="utf-8").strip()

diff --git a/src/common/core/bunkernet/plugin.json b/src/common/core/bunkernet/plugin.json
index 127ee71e6..dc7aa6e2d 100644
--- a/src/common/core/bunkernet/plugin.json
+++ b/src/common/core/bunkernet/plugin.json
@@ -36,6 +36,12 @@
       "file": "bunkernet-data.py",
       "every": "day",
       "reload": true
+    },
+    {
+      "name": "bunkernet-send",
+      "file": "bunkernet-send.py",
+      "every": "hour",
+      "reload": true
     }
   ]
 }
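
Note: a hypothetical call of the new send_reports() helper. The field names
mirror the report table queued by bunkernet.lua above; every value below is
invented for illustration:

    from bunkernet import send_reports

    batch = [
        {
            "ip": "203.0.113.42",  # made-up values throughout
            "reason": "dnsbl",
            "data": {},
            "method": "GET",
            "url": "/wp-login.php",
            "headers": {"user-agent": "curl/8.5.0"},
            "server_name": "www.example.com",
            "date": "2025-01-24T09:30:00Z",
        }
    ]

    # POSTs {"reports": [...]} merged into the usual instance metadata payload
    ok, status, data = send_reports(batch)
    if not ok:
        print(f"request failed: {data}")
    elif status == 429:
        print("rate limited, the batch should be retried later")
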
From d34b919c3f6a4601366d7b1ec41c174ee0f6b456 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophile=20Diot?=
Date: Fri, 24 Jan 2025 10:41:34 +0100
Subject: [PATCH 4/7] Fix reports URL in menu template to point directly to
 reports page

---
 src/ui/app/templates/menu.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ui/app/templates/menu.html b/src/ui/app/templates/menu.html
index 8197f6b56..71a319e0f 100644
--- a/src/ui/app/templates/menu.html
+++ b/src/ui/app/templates/menu.html
@@ -24,7 +24,7 @@
               "configs": {"url": url_for('configs'), "icon": "bx-wrench"},
               "plugins": {"url": url_for('plugins'), "icon": "bx-plug"},
               "cache": {"url": url_for('cache'), "icon": "bx-data"},
-              "reports": {"url": url_for("loading", next=url_for("reports")), "icon": "bxs-flag-checkered"},
+              "reports": {"url": url_for("reports"), "icon": "bxs-flag-checkered"},
               "bans": {"url": url_for('bans'), "icon": "bx-block"},
               "jobs": {"url": url_for('jobs'), "icon": "bx-time-five"},
               "logs": {"url": url_for('logs'), "icon": "bx-file-find"},

From 4d18c0650d22250fdde2c875b9ee1dbd3708e54e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophile=20Diot?=
Date: Fri, 24 Jan 2025 10:41:54 +0100
Subject: [PATCH 5/7] Remove redundant check for empty directories in job
 cleanup process

---
 src/common/utils/jobs.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/common/utils/jobs.py b/src/common/utils/jobs.py
index 3cba383b7..7cf61354d 100644
--- a/src/common/utils/jobs.py
+++ b/src/common/utils/jobs.py
@@ -129,7 +129,7 @@ def restore_cache(self, *, job_name: str = "", plugin_id: str = "", manual: bool
                 rmtree(file.parent, ignore_errors=True)
                 if file.parent == self.job_path:
                     break
-            elif file.is_dir() and not list(file.iterdir()):
+            elif file.is_dir():
                 self.logger.debug(f"Removing empty directory {file}")
                 rmtree(file, ignore_errors=True)

From 501ad106f1f11173ffc23a09b55be6f64521b71c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophile=20Diot?=
Date: Fri, 24 Jan 2025 11:10:13 +0100
Subject: [PATCH 6/7] Update BunkerNet's logic to send reports in bulk instead
 of one by one

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 68f237992..49f48e062 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,7 @@
 
 ## v1.6.0-rc3 - ????/??/??
 
+- [FEATURE] Update BunkerNet's logic to send reports in bulk instead of one by one
 - [AUTOCONF] Add the possibility to add/override settings via ConfigMap in Kubernetes using the `bunkerweb.io/CONFIG_TYPE=settings` annotation
 - [UI] Add support page for easy logs and configuration sharing while anonymizing sensitive data
 - [LINUX] Support Fedora 41

From e0f9f6840511f03f496db5c80f9ad116191a3f7b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophile=20Diot?=
Date: Fri, 24 Jan 2025 11:19:50 +0100
Subject: [PATCH 7/7] Update Kubernetes, Pydantic, and S3Transfer dependencies
 to latest versions

---
 src/autoconf/requirements.in   |  2 +-
 src/autoconf/requirements.txt  |  6 +++---
 src/scheduler/requirements.in  |  2 +-
 src/scheduler/requirements.txt | 24 ++++++++++++------------
 4 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/autoconf/requirements.in b/src/autoconf/requirements.in
index 26a3b3e2a..dfe6220a1 100644
--- a/src/autoconf/requirements.in
+++ b/src/autoconf/requirements.in
@@ -1,3 +1,3 @@
 docker==7.1.0
-kubernetes==31.0.0
+kubernetes==32.0.0
 pytz==2024.2

diff --git a/src/autoconf/requirements.txt b/src/autoconf/requirements.txt
index 12741d489..c026b3ce7 100644
--- a/src/autoconf/requirements.txt
+++ b/src/autoconf/requirements.txt
@@ -124,9 +124,9 @@ idna==3.10 \
     --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
     --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
     # via requests
-kubernetes==31.0.0 \
-    --hash=sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0 \
-    --hash=sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1
+kubernetes==32.0.0 \
+    --hash=sha256:319fa840345a482001ac5d6062222daeb66ec4d1bcb3087402aed685adf0aecb \
+    --hash=sha256:60fd8c29e8e43d9c553ca4811895a687426717deba9c0a66fb2dcc3f5ef96692
     # via -r requirements.in
 oauthlib==3.2.2 \
     --hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \

diff --git a/src/scheduler/requirements.in b/src/scheduler/requirements.in
index 78fdf06ec..cafe98fa3 100644
--- a/src/scheduler/requirements.in
+++ b/src/scheduler/requirements.in
@@ -16,7 +16,7 @@ certbot-dns-scaleway==0.0.7
 cryptography==44.0.0
 importlib-metadata==8.6.1
 maxminddb==2.6.3
-pydantic==2.10.5
+pydantic==2.10.6
 python-magic==0.4.27
 requests==2.32.3
 schedule==1.2.2

diff --git a/src/scheduler/requirements.txt b/src/scheduler/requirements.txt
index 0e6f4063a..af587c017 100644
--- a/src/scheduler/requirements.txt
+++ b/src/scheduler/requirements.txt
@@ -35,13 +35,13 @@ beautifulsoup4==4.12.3 \
     --hash=sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051 \
     --hash=sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed
     # via dns-lexicon
-boto3==1.36.4 \
-    --hash=sha256:9f8f699e75ec63fcc98c4dd7290997c7c06c68d3ac8161ad4735fe71f5fe945c \
-    --hash=sha256:eeceeb74ef8b65634d358c27aa074917f4449dc828f79301f1075232618eb502
+boto3==1.36.5 \
+    --hash=sha256:58a6b7c3d5145b3ac04d4b6caa76223b8ef88004b4237444e553041e29581a11 \
+    --hash=sha256:a404ad5ec94ff40c176215a991bf62f0db5514a93a3dd361b7b2ab9660f811f4
     # via certbot-dns-route53
-botocore==1.36.4 \
-    --hash=sha256:3f183aa7bb0c1ba02171143a05f28a4438abdf89dd6b8c0a7727040375a90520 \
-    --hash=sha256:ef54f5e3316040b6ff775941e6ed052c3230dda0079d17d9f9e3c757375f2027
+botocore==1.36.5 \
+    --hash=sha256:234ed3d29a8954c37a551c933453bf14c6ae44a69a4f273ffef377a2612ca6a6 \
+    --hash=sha256:6d9f70afa9bf9d21407089dc22b8cc8ec6fa44866d4660858c062c74fc8555eb
     # via
     #   boto3
     #   s3transfer
@@ -549,9 +549,9 @@ pycparser==2.22 \
     --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
     --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
     # via cffi
-pydantic==2.10.5 \
-    --hash=sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff \
-    --hash=sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53
+pydantic==2.10.6 \
+    --hash=sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584 \
+    --hash=sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236
     # via -r requirements.in
 pydantic-core==2.27.2 \
     --hash=sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278 \
@@ -776,9 +776,9 @@ rsa==4.9 \
     --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
     --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
     # via google-auth
-s3transfer==0.11.1 \
-    --hash=sha256:3f25c900a367c8b7f7d8f9c34edc87e300bde424f779dc9f0a8ae4f9df9264f6 \
-    --hash=sha256:8fa0aa48177be1f3425176dfe1ab85dcd3d962df603c3dbfc585e6bf857ef0ff
+s3transfer==0.11.2 \
+    --hash=sha256:3b39185cb72f5acc77db1a58b6e25b977f28d20496b6e58d6813d75f464d632f \
+    --hash=sha256:be6ecb39fadd986ef1701097771f87e4d2f821f27f6071c872143884d2950fbc
     # via boto3
 schedule==1.2.2 \
     --hash=sha256:15fe9c75fe5fd9b9627f3f19cc0ef1420508f9f9a46f45cd0769ef75ede5f0b7 \