Merge branch 'development'

Jan Wagner 2022-12-27 19:02:33 +00:00
commit 4ec49556ac
4 changed files with 98 additions and 0 deletions


@@ -0,0 +1,22 @@
From 28553e8d1cc56de12e4c9f7705a92f0e0e86d9d9 Mon Sep 17 00:00:00 2001
From: RincewindsHat <12514511+RincewindsHat@users.noreply.github.com>
Date: Mon, 19 Dec 2022 17:15:49 +0100
Subject: [PATCH] Fix unknown escape sequence error output
---
plugins/check_apt.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/plugins/check_apt.c b/plugins/check_apt.c
index d7be57505..f70fec16b 100644
--- a/plugins/check_apt.c
+++ b/plugins/check_apt.c
@@ -530,7 +530,7 @@ print_help (void)
printf (" %s\n", _("this REGEXP, the plugin will return CRITICAL status. Can be specified"));
printf (" %s\n", _("multiple times like above. Default is a regexp matching security"));
printf (" %s\n", _("upgrades for Debian and Ubuntu:"));
- printf (" \t\%s\n", SECURITY_RE);
+ printf (" \t%s\n", SECURITY_RE);
printf (" %s\n", _("Note that the package must first match the include list before its"));
printf (" %s\n", _("information is compared against the critical list."));
printf (" %s\n", "-o, --only-critical");

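For context on the one-character change above: in a C string literal, \% is not a recognized escape sequence, so compilers warn about an unknown escape sequence (and fail the build under -Werror) while simply dropping the backslash, which is why the fix changes the warning but not the output. A minimal sketch of the before/after forms, using a placeholder value for SECURITY_RE (the real regexp is defined in check_apt.c and is not reproduced here):

#include <stdio.h>

/* Stand-in for the SECURITY_RE macro from check_apt.c; placeholder only. */
#define SECURITY_RE "<security-upgrade-regexp>"

int main(void)
{
    /* Old form: " \t\%s\n" -- '\%' is not a valid escape sequence, so the
     * compiler warns ("unknown escape sequence") and drops the backslash.
     * The printed text is unchanged; only the build gets noisy. */

    /* Patched form: identical output, no warning. */
    printf(" \t%s\n", SECURITY_RE);
    return 0;
}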

@@ -0,0 +1,37 @@
From 763862a61cf5a7ba1a10f607022aac2434c79f57 Mon Sep 17 00:00:00 2001
From: Danijel Tasov <data@consol.de>
Date: Wed, 21 Dec 2022 14:48:11 +0100
Subject: [PATCH] make check_http faster with larger files
The current implementation becomes exponentially slower with growing
response size.
See also:
https://github.com/nagios-plugins/nagios-plugins/blob/release-2.4.2/plugins/check_http.c#L1199-L1204
---
plugins/check_http.c | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/plugins/check_http.c b/plugins/check_http.c
index 41d478163..1835a2d09 100644
--- a/plugins/check_http.c
+++ b/plugins/check_http.c
@@ -1095,9 +1095,14 @@ check_http (void)
*pos = ' ';
}
buffer[i] = '\0';
- xasprintf (&full_page_new, "%s%s", full_page, buffer);
- free (full_page);
+
+ if ((full_page_new = realloc(full_page, pagesize + i + 1)) == NULL)
+ die (STATE_UNKNOWN, _("HTTP UNKNOWN - Could not allocate memory for full_page\n"));
+
+ memmove(&full_page_new[pagesize], buffer, i + 1);
+
full_page = full_page_new;
+
pagesize += i;
if (no_body && document_headers_done (full_page)) {

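The patch above replaces the old "rebuild the whole page for every chunk" concatenation with an in-place realloc append, which is why large responses no longer slow the check down quadratically: the old xasprintf call re-copied everything received so far on every read, while the new code copies only the new chunk. A minimal sketch of that append pattern, not the plugin's actual code; append_chunk and the chunk data here are illustrative only:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Append `len` bytes of `chunk` to the growing page buffer.  Reallocating
 * and copying only the new chunk keeps total work linear in the page size;
 * rebuilding the whole string for every chunk degrades to O(n^2). */
static char *append_chunk(char *page, size_t *pagesize,
                          const char *chunk, size_t len)
{
    char *grown = realloc(page, *pagesize + len + 1);
    if (grown == NULL) {
        free(page);
        return NULL;            /* caller reports the allocation failure */
    }
    memcpy(grown + *pagesize, chunk, len);
    *pagesize += len;
    grown[*pagesize] = '\0';
    return grown;
}

int main(void)
{
    char *page = calloc(1, 1);  /* start with an empty string */
    size_t pagesize = 0;

    for (int i = 0; i < 3 && page != NULL; i++)
        page = append_chunk(page, &pagesize, "chunk ", 6);

    if (page != NULL)
        printf("%zu bytes: %s\n", pagesize, page);
    free(page);
    return 0;
}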

@@ -0,0 +1,36 @@
From 765b29f09bd3bc2a938260caa5f263343aafadb7 Mon Sep 17 00:00:00 2001
From: Sven Nierlein <sven@nierlein.de>
Date: Thu, 22 Dec 2022 12:51:18 +0100
Subject: [PATCH] check_curl: fix checking large bodys (#1823)
check_curl fails on large pages:
HTTP CRITICAL - Invalid HTTP response received from host on port 5080: cURL returned 23 - Failure writing output to destination
for example trying to run check_curl on the test from #1822
I guess the idea is to double the buffer size each time it is too small. But the code
grows the buffer size exponentially, which works well 2-3 times, but then fails.
---
plugins/check_curl.c | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/plugins/check_curl.c b/plugins/check_curl.c
index 2ad373c05..55de22fd2 100644
--- a/plugins/check_curl.c
+++ b/plugins/check_curl.c
@@ -2024,9 +2024,12 @@ curlhelp_buffer_write_callback (void *buffer, size_t size, size_t nmemb, void *s
curlhelp_write_curlbuf *buf = (curlhelp_write_curlbuf *)stream;
while (buf->bufsize < buf->buflen + size * nmemb + 1) {
- buf->bufsize *= buf->bufsize * 2;
+ buf->bufsize = buf->bufsize * 2;
buf->buf = (char *)realloc (buf->buf, buf->bufsize);
- if (buf->buf == NULL) return -1;
+ if (buf->buf == NULL) {
+ fprintf(stderr, "malloc failed (%d) %s\n", errno, strerror(errno));
+ return -1;
+ }
}
memcpy (buf->buf + buf->buflen, buffer, size * nmemb);

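The underlying bug is the growth rule itself: bufsize *= bufsize * 2 squares the buffer size (times two) on every pass instead of doubling it, so after a handful of iterations the requested size overflows size_t, realloc can no longer succeed, the write callback returns -1, and curl reports the error 23 quoted above. A small stand-alone comparison of the two update rules, with an illustrative starting size of 1024 bytes:

#include <stdio.h>
#include <stddef.h>

int main(void)
{
    /* Growth of the buffer size under the two update rules, starting from
     * the same initial capacity.  The buggy rule squares the size (times
     * two) on every pass, so within a few iterations it wraps around
     * size_t and realloc can never satisfy the request. */
    size_t buggy = 1024, fixed = 1024;

    for (int i = 1; i <= 5; i++) {
        buggy *= buggy * 2;      /* old code: bufsize *= bufsize * 2 */
        fixed = fixed * 2;       /* patched:  bufsize  = bufsize * 2 */
        printf("pass %d: buggy=%zu  fixed=%zu\n", i, buggy, fixed);
    }
    return 0;
}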

@@ -8,3 +8,6 @@
17_check_curl_detect_ipv6
18_check_icmp_help
19_check_disk_fs_usage
20_check_apt_unknown_escape_sequence
21_check_http_faster_with_large_files
22_check_curl_faster_with_large_files