From e87d89da2ea604fe2b05cc8184846f53e2f3ead8 Mon Sep 17 00:00:00 2001 From: Marius Dinu Date: Thu, 4 Apr 2024 20:45:11 +0300 Subject: [PATCH 01/20] audit: move from packages to openwrt This package is joined with libaudit from openwrt base packages. Signed-off-by: Marius Dinu --- utils/audit/Makefile | 143 ------------------ utils/audit/files/audit.init | 16 -- ...tue-functions-for-strndupa-rawmemchr.patch | 122 --------------- utils/audit/patches/0002-fix-gcc-10.patch | 21 --- ...acket-interpretation-dependent-on-th.patch | 52 ------- 5 files changed, 354 deletions(-) delete mode 100644 utils/audit/Makefile delete mode 100644 utils/audit/files/audit.init delete mode 100644 utils/audit/patches/0001-Add-substitue-functions-for-strndupa-rawmemchr.patch delete mode 100644 utils/audit/patches/0002-fix-gcc-10.patch delete mode 100644 utils/audit/patches/0003-Make-IPX-packet-interpretation-dependent-on-th.patch diff --git a/utils/audit/Makefile b/utils/audit/Makefile deleted file mode 100644 index b610b55c52..0000000000 --- a/utils/audit/Makefile +++ /dev/null @@ -1,143 +0,0 @@ -# -# This is free software, licensed under the GNU General Public License v2. -# See /LICENSE for more information. -# - -include $(TOPDIR)/rules.mk - -PKG_NAME:=audit -PKG_VERSION:=2.8.5 -PKG_RELEASE:=7 - -PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz -PKG_SOURCE_URL:=http://people.redhat.com/sgrubb/audit -PKG_HASH:=0e5d4103646e00f8d1981e1cd2faea7a2ae28e854c31a803e907a383c5e2ecb7 - -PKG_MAINTAINER:=Thomas Petazzoni -PKG_LICENSE:=GPL-2.0-or-later -PKG_LICENSE_FILES:=COPYING -PKG_CPE_ID:=cpe:/a:linux_audit_project:linux_audit - -PKG_FIXUP:=autoreconf -PKG_BUILD_DIR=$(BUILD_DIR)/$(PKG_NAME)-packages/$(PKG_NAME)-$(PKG_VERSION) - -PKG_BUILD_FLAGS:=no-mips16 - -include $(INCLUDE_DIR)/package.mk - -define Package/audit/Default - TITLE:=Audit Daemon - URL:=http://people.redhat.com/sgrubb/audit/ -endef - -define Package/audit/Default/description - The audit package contains the user space utilities for - storing and searching the audit records generated by - the audit subsystem in the Linux 2.6 kernel -endef - -define Package/libauparse -$(call Package/audit/Default) - SECTION:=libs - CATEGORY:=Libraries - TITLE+= (parsing shared library) - DEPENDS:= +libaudit -endef - -define Package/libauparse/description -$(call Package/audit/Default/description) - This package contains the audit parsing shared library. -endef - -define Package/audit-utils -$(call Package/audit/Default) - SECTION:=utils - CATEGORY:=Utilities - TITLE+= (utilities) - DEPENDS:= +libaudit +libauparse -endef - -define Package/audit-utils/description -$(call Package/audit/Default/description) - This package contains the audit utilities. -endef - -define Package/audit -$(call Package/audit/Default) - SECTION:=utils - CATEGORY:=Utilities - TITLE+= (daemon) - DEPENDS:= +libaudit +libauparse +audit-utils +libev -endef - -define Package/audit/description -$(call Package/audit/Default/description) - This package contains the audit daemon. -endef - -CONFIGURE_VARS += \ - LDFLAGS_FOR_BUILD="$(HOST_LDFLAGS)" \ - CPPFLAGS_FOR_BUILD="$(HOST_CPPFLAGS)" \ - CFLAGS_FOR_BUILD="$(HOST_CFLAGS)" \ - CC_FOR_BUILD="$(HOSTCC)" - -CONFIGURE_ARGS += \ - --without-libcap-ng \ - --disable-systemd \ - --without-python \ - --without-python3 \ - --disable-zos-remote - -ifeq ($(ARCH),aarch64) -CONFIGURE_ARGS += --with-aarch64 -else ifeq ($(ARCH),arm) -CONFIGURE_ARGS += --with-arm -endif - -# We can't use the default, as the default passes $(MAKE_ARGS), which -# overrides CC, CFLAGS, etc. 
and defeats the *_FOR_BUILD definitions -# passed in CONFIGURE_VARS -define Build/Compile - $(MAKE) $(PKG_JOBS) -C $(PKG_BUILD_DIR)/$(MAKE_PATH) -endef - -define Build/Install - $(call Build/Install/Default,install) - $(SED) 's%^dispatcher *=.*%dispatcher = /usr/sbin/audispd%' $(PKG_INSTALL_DIR)/etc/audit/auditd.conf -endef - -define Build/InstallDev - $(INSTALL_DIR) $(1)/usr/include - $(CP) $(PKG_INSTALL_DIR)/usr/include/* $(1)/usr/include/ - $(INSTALL_DIR) $(1)/usr/lib/pkgconfig - $(INSTALL_DATA) $(PKG_INSTALL_DIR)/usr/lib/pkgconfig/*.pc $(1)/usr/lib/pkgconfig/ - $(INSTALL_DIR) $(1)/usr/lib - $(CP) $(PKG_INSTALL_DIR)/usr/lib/* $(1)/usr/lib/ -endef - -define Package/libauparse/install - $(INSTALL_DIR) $(1)/usr/lib - $(CP) $(PKG_INSTALL_DIR)/usr/lib/libauparse.so.* $(1)/usr/lib/ -endef - -define Package/audit-utils/install - $(INSTALL_DIR) $(1)/usr/bin - $(CP) $(PKG_INSTALL_DIR)/usr/bin/* $(1)/usr/bin/ - $(INSTALL_DIR) $(1)/usr/sbin - $(CP) \ - $(PKG_INSTALL_DIR)/usr/sbin/{augenrules,audispd,audisp-remote,auditctl,autrace,aureport,ausearch} \ - $(1)/usr/sbin/ -endef - -define Package/audit/install - $(INSTALL_DIR) $(1)/etc/audit - $(CP) $(PKG_INSTALL_DIR)/etc/audit/* $(1)/etc/audit/ - $(INSTALL_DIR) $(1)/etc/init.d - $(INSTALL_BIN) ./files/audit.init $(1)/etc/init.d/audit - $(INSTALL_DIR) $(1)/usr/sbin - $(CP) $(PKG_INSTALL_DIR)/usr/sbin/auditd $(1)/usr/sbin/ -endef - -$(eval $(call BuildPackage,libauparse)) -$(eval $(call BuildPackage,audit-utils)) -$(eval $(call BuildPackage,audit)) diff --git a/utils/audit/files/audit.init b/utils/audit/files/audit.init deleted file mode 100644 index 4a9f53884b..0000000000 --- a/utils/audit/files/audit.init +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh /etc/rc.common -# Copyright (c) 2014 OpenWrt.org - -START=11 - -USE_PROCD=1 -PROG=/usr/sbin/auditd - -start_service() { - mkdir -p /var/log/audit - procd_open_instance - procd_set_param command "$PROG" -n - procd_set_param respawn - procd_close_instance - test -f /etc/audit/rules.d/audit.rules && /usr/sbin/auditctl -R /etc/audit/rules.d/audit.rules -} diff --git a/utils/audit/patches/0001-Add-substitue-functions-for-strndupa-rawmemchr.patch b/utils/audit/patches/0001-Add-substitue-functions-for-strndupa-rawmemchr.patch deleted file mode 100644 index 2fa5c5933f..0000000000 --- a/utils/audit/patches/0001-Add-substitue-functions-for-strndupa-rawmemchr.patch +++ /dev/null @@ -1,122 +0,0 @@ -From c39a071e7c021f6ff3554aca2758e97b47a9777c Mon Sep 17 00:00:00 2001 -From: Steve Grubb -Date: Tue, 26 Feb 2019 18:33:33 -0500 -Subject: [PATCH] Add substitue functions for strndupa & rawmemchr - -(cherry picked from commit d579a08bb1cde71f939c13ac6b2261052ae9f77e) -Signed-off-by: Thomas Petazzoni ---- - auparse/auparse.c | 12 +++++++++++- - auparse/interpret.c | 9 ++++++++- - configure.ac | 14 +++++++++++++- - src/ausearch-lol.c | 12 +++++++++++- - 4 files changed, 43 insertions(+), 4 deletions(-) - ---- a/auparse/auparse.c -+++ b/auparse/auparse.c -@@ -1,5 +1,5 @@ - /* auparse.c -- -- * Copyright 2006-08,2012-17 Red Hat Inc., Durham, North Carolina. -+ * Copyright 2006-08,2012-19 Red Hat Inc., Durham, North Carolina. - * All Rights Reserved. 
- * - * This library is free software; you can redistribute it and/or -@@ -1118,6 +1118,16 @@ static int str2event(char *s, au_event_t - return 0; - } - -+#ifndef HAVE_STRNDUPA -+static inline char *strndupa(const char *old, size_t n) -+{ -+ size_t len = strnlen(old, n); -+ char *tmp = alloca(len + 1); -+ tmp[len] = 0; -+ return memcpy(tmp, old, len); -+} -+#endif -+ - /* Returns 0 on success and 1 on error */ - static int extract_timestamp(const char *b, au_event_t *e) - { ---- a/auparse/interpret.c -+++ b/auparse/interpret.c -@@ -853,6 +853,13 @@ err_out: - return print_escaped(id->val); - } - -+// rawmemchr is faster. Let's use it if we have it. -+#ifdef HAVE_RAWMEMCHR -+#define STRCHR rawmemchr -+#else -+#define STRCHR strchr -+#endif -+ - static const char *print_proctitle(const char *val) - { - char *out = (char *)print_escaped(val); -@@ -863,7 +870,7 @@ static const char *print_proctitle(const - // Proctitle has arguments separated by NUL bytes - // We need to write over the NUL bytes with a space - // so that we can see the arguments -- while ((ptr = rawmemchr(ptr, '\0'))) { -+ while ((ptr = STRCHR(ptr, '\0'))) { - if (ptr >= end) - break; - *ptr = ' '; ---- a/configure.ac -+++ b/configure.ac -@@ -1,7 +1,7 @@ - dnl - define([AC_INIT_NOTICE], - [### Generated automatically using autoconf version] AC_ACVERSION [ --### Copyright 2005-18 Steve Grubb -+### Copyright 2005-19 Steve Grubb - ### - ### Permission is hereby granted, free of charge, to any person obtaining a - ### copy of this software and associated documentation files (the "Software"), -@@ -72,6 +72,18 @@ dnl; posix_fallocate is used in audisp-r - AC_CHECK_FUNCS([posix_fallocate]) - dnl; signalfd is needed for libev - AC_CHECK_FUNC([signalfd], [], [ AC_MSG_ERROR([The signalfd system call is necessary for auditd]) ]) -+dnl; check if rawmemchr is available -+AC_CHECK_FUNCS([rawmemchr]) -+dnl; check if strndupa is available -+AC_LINK_IFELSE( -+ [AC_LANG_SOURCE( -+ [[ -+ #define _GNU_SOURCE -+ #include -+ int main() { (void) strndupa("test", 10); return 0; }]])], -+ [AC_DEFINE(HAVE_STRNDUPA, 1, [Let us know if we have it or not])], -+ [] -+) - - ALLWARNS="" - ALLDEBUG="-g" ---- a/src/ausearch-lol.c -+++ b/src/ausearch-lol.c -@@ -1,6 +1,6 @@ - /* - * ausearch-lol.c - linked list of linked lists library --* Copyright (c) 2008,2010,2014,2016 Red Hat Inc., Durham, North Carolina. -+* Copyright (c) 2008,2010,2014,2016,2019 Red Hat Inc., Durham, North Carolina. - * All Rights Reserved. - * - * This software may be freely redistributed and/or modified under the -@@ -152,6 +152,16 @@ static int compare_event_time(event *e1, - return 0; - } - -+#ifndef HAVE_STRNDUPA -+static inline char *strndupa(const char *old, size_t n) -+{ -+ size_t len = strnlen(old, n); -+ char *tmp = alloca(len + 1); -+ tmp[len] = 0; -+ return memcpy(tmp, old, len); -+} -+#endif -+ - /* - * This function will look at the line and pick out pieces of it. 
- */ diff --git a/utils/audit/patches/0002-fix-gcc-10.patch b/utils/audit/patches/0002-fix-gcc-10.patch deleted file mode 100644 index 19c0c4c3bc..0000000000 --- a/utils/audit/patches/0002-fix-gcc-10.patch +++ /dev/null @@ -1,21 +0,0 @@ -From 017e6c6ab95df55f34e339d2139def83e5dada1f Mon Sep 17 00:00:00 2001 -From: Steve Grubb -Date: Fri, 10 Jan 2020 21:13:50 -0500 -Subject: [PATCH 01/30] Header definitions need to be external when building - with -fno-common (which is default in GCC 10) - Tony Jones - ---- - src/ausearch-common.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - ---- a/src/ausearch-common.h -+++ b/src/ausearch-common.h -@@ -50,7 +50,7 @@ extern pid_t event_pid; - extern int event_exact_match; - extern uid_t event_uid, event_euid, event_loginuid; - extern const char *event_tuid, *event_teuid, *event_tauid; --slist *event_node_list; -+extern slist *event_node_list; - extern const char *event_comm; - extern const char *event_filename; - extern const char *event_hostname; diff --git a/utils/audit/patches/0003-Make-IPX-packet-interpretation-dependent-on-th.patch b/utils/audit/patches/0003-Make-IPX-packet-interpretation-dependent-on-th.patch deleted file mode 100644 index f4b4492c22..0000000000 --- a/utils/audit/patches/0003-Make-IPX-packet-interpretation-dependent-on-th.patch +++ /dev/null @@ -1,52 +0,0 @@ -From 6b09724c69d91668418ddb3af00da6db6755208c Mon Sep 17 00:00:00 2001 -From: Steve Grubb -Date: Thu, 2 Sep 2021 15:01:12 -0400 -Subject: [PATCH] Make IPX packet interpretation dependent on the ipx header - file existing - ---- a/auparse/interpret.c -+++ b/auparse/interpret.c -@@ -44,8 +44,10 @@ - #include - #include - #include --#include // FIXME: remove when ipx.h is fixed --#include -+#ifdef HAVE_IPX_HEADERS -+ #include // FIXME: remove when ipx.h is fixed -+ #include -+#endif - #include - #include - #include -@@ -1158,6 +1160,7 @@ static const char *print_sockaddr(const - x->sax25_call.ax25_call[6]); - } - break; -+#ifdef HAVE_IPX_HEADERS - case AF_IPX: - { - const struct sockaddr_ipx *ip = -@@ -1167,6 +1170,7 @@ static const char *print_sockaddr(const - str, ip->sipx_port, ip->sipx_network); - } - break; -+#endif - case AF_ATMPVC: - { - const struct sockaddr_atmpvc* at = ---- a/configure.ac -+++ b/configure.ac -@@ -414,6 +414,12 @@ if test x"$LIBWRAP_LIBS" != "x"; then - AC_DEFINE_UNQUOTED(HAVE_LIBWRAP, [], Define if tcp_wrappers support is enabled ) - fi - -+# linux/ipx.h - deprecated in 2018 -+AC_CHECK_HEADER(linux/ipx.h, ipx_headers=yes, ipx_headers=no) -+if test $ipx_headers = yes ; then -+ AC_DEFINE(HAVE_IPX_HEADERS,1,[IPX packet interpretation]) -+fi -+ - # See if we want to support lower capabilities for plugins - LIBCAP_NG_PATH - From 8b08b29271e9f8d26ce8d337ffb4261ea8a25914 Mon Sep 17 00:00:00 2001 From: Goetz Goerisch Date: Fri, 19 Apr 2024 18:34:19 +0200 Subject: [PATCH 02/20] jool: update documentation * corrected the documentation links for upstream * fixed style to be correctly rendered * add reference to OpenWrt tutorial Signed-off-by: Goetz Goerisch --- net/jool/files/readme.md | 39 +++++++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/net/jool/files/readme.md b/net/jool/files/readme.md index 88f978d9dc..0638d786e7 100644 --- a/net/jool/files/readme.md +++ b/net/jool/files/readme.md @@ -1,28 +1,35 @@ -# [Jool](https://www.jool.mx) +# [Jool](https://nicmx.github.io/Jool/en/index.html) ## Documentation -[See here](https://www.jool.mx/en/documentation.html). 
+[See here](https://nicmx.github.io/Jool/en/documentation.html). -You might also want to see [contact info](https://www.jool.mx/en/contact.html). +You might also want to see [contact info](https://nicmx.github.io/Jool/en/contact.html). ## Usage ### Start script This package includes a start script that will: - 1. Read the configuration file `/etc/config/jool` - 2. Determine what services are active - 3. Run jool with procd -For now this means that: - * The services will be disabled by default in the uci config `(/etc/config/jool)` - * The only uci configuration support available for the package is to enable or disable each instance or the entire deamon - * There is no uci support and configuration will be saved at `/etc/jool/* - * Only one instance of jool(nat64) can run with the boot script - * Only one instance of jool(siit) can run with the boot script - * For now there is no way of overriding of the configuration file's paths + 1. Read the configuration file `/etc/config/jool` + 2. Determine what services are active + 3. Run `jool` with procd -The configuration files the startup script useses for each jool instance are: - * jool(nat64): `/etc/jool/jool-nat64.conf.json` - * jool(siit): `/etc/jool/jool-siit.conf.json` +### For now this means that + +- The services will be disabled by default in the uci config `(/etc/config/jool)` +- The only uci configuration support available for the package is to enable or disable each instance or the entire deamon +- There is no uci support and configuration will be saved at `/etc/jool/` +- Only one instance of jool(nat64) can run with the boot script +- Only one instance of jool(siit) can run with the boot script +- For now there is no way of overriding of the configuration file's paths + +The configuration files the startup script uses for each jool instance are: + +- jool(nat64): `/etc/jool/jool-nat64.conf.json` +- jool(siit): `/etc/jool/jool-siit.conf.json` + +### OpenWrt tutorial + +For a more detailed tutorial refer to this [wiki page](https://openwrt.org/docs/guide-user/network/ipv6/nat64). From 26c101edc3e918be4fbfe76b3514d1c8398f7d31 Mon Sep 17 00:00:00 2001 From: Paul Spooren Date: Fri, 9 Oct 2020 15:31:01 -1000 Subject: [PATCH 03/20] CI: remove CircleCI for now The GitHub CI offers currenlty more architecture and the Signed-of-by test is covered via the DOC CI test. In case GitHub ever changes policies, we can simply switch back. 
Signed-off-by: Paul Spooren --- .circleci/Dockerfile | 93 ---------------------- .circleci/README | 6 -- .circleci/config.yml | 182 ------------------------------------------- 3 files changed, 281 deletions(-) delete mode 100644 .circleci/Dockerfile delete mode 100644 .circleci/README delete mode 100644 .circleci/config.yml diff --git a/.circleci/Dockerfile b/.circleci/Dockerfile deleted file mode 100644 index b2914c38b9..0000000000 --- a/.circleci/Dockerfile +++ /dev/null @@ -1,93 +0,0 @@ -FROM debian:10 - - -# Configuration version history -# v1.0 - Initial version by Etienne Champetier -# v1.0.1 - Run as non-root, add unzip, xz-utils -# v1.0.2 - Add bzr -# v1.0.3 - Verify usign signatures -# v1.0.4 - Add support for Python3 -# v1.0.5 - Add 19.07 public keys, verify keys -# v1.0.6 - Add 21.02 public keys, update Debian image to version 10, add rsync -# v1.0.7 - Add 22.03 public keys, 18.06 v2 gpg key, 18.06 usign key - -RUN apt update && apt install -y \ -build-essential \ -bzr \ -curl \ -jq \ -gawk \ -gettext \ -git \ -libncurses5-dev \ -libssl-dev \ -python \ -python3 \ -signify-openbsd \ -subversion \ -rsync \ -time \ -unzip \ -wget \ -xz-utils \ -zlib1g-dev \ -&& rm -rf /var/lib/apt/lists/* - -RUN useradd -c "OpenWrt Builder" -m -d /home/build -s /bin/bash build -USER build -ENV HOME /home/build - -# OpenWrt Build System (PGP key for unattended snapshot builds) -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/626471F1.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::54CC74307A2C6DC9CE618269CD84BCED626471F1:$' \ - && echo '54CC74307A2C6DC9CE618269CD84BCED626471F1:6:' | gpg --import-ownertrust - -# OpenWrt Build System (PGP key for 17.01 "Reboot" release builds) -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/D52BBB6B.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::B09BE781AE8A0CD4702FDCD3833C6010D52BBB6B:$' \ - && echo 'B09BE781AE8A0CD4702FDCD3833C6010D52BBB6B:6:' | gpg --import-ownertrust - -# OpenWrt Release Builder (18.06 Signing Key) -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/17E1CE16.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::6768C55E79B032D77A28DA5F0F20257417E1CE16:$' \ - && echo '6768C55E79B032D77A28DA5F0F20257417E1CE16:6:' | gpg --import-ownertrust - -# OpenWrt Build System (PGP key for 18.06 release builds) -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/15807931.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::AD0507363D2BCE9C9E36CEC4FBCB78F015807931:$' \ - && echo 'AD0507363D2BCE9C9E36CEC4FBCB78F015807931:6:' | gpg --import-ownertrust - -# OpenWrt Build System (PGP key for 19.07 release builds) -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/2074BE7A.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::D9C6901F45C9B86858687DFF28A39BC32074BE7A:$' \ - && echo 'D9C6901F45C9B86858687DFF28A39BC32074BE7A:6:' | gpg --import-ownertrust - -# OpenWrt Build System (PGP key for 21.02 release builds) -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/88CA59E8.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::667205E379BAF348863A5C6688CA59E88F681580:$' \ - && echo '667205E379BAF348863A5C6688CA59E88F681580:6:' | gpg --import-ownertrust - -# OpenWrt Build System (GnuPGP key for 22.03 release builds) -RUN curl 
'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=gpg/CD54E82DADB3684D.asc' | gpg --import \ - && gpg --fingerprint --with-colons '' | grep '^fpr:::::::::BF856781A01293C8409ABE72CD54E82DADB3684D:$' \ - && echo 'BF856781A01293C8409ABE72CD54E82DADB3684D:6:' | gpg --import-ownertrust - -# untrusted comment: Public usign key for unattended snapshot builds -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=usign/b5043e70f9a75cde' --create-dirs -o /home/build/usign/b5043e70f9a75cde \ - && echo 'd7ac10f9ed1b38033855f3d27c9327d558444fca804c685b17d9dcfb0648228f */home/build/usign/b5043e70f9a75cde' | sha256sum --check - -# untrusted comment: Public usign key for 18.06 release builds -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=usign/1035ac73cc4e59e3' --create-dirs -o /home/build/usign/1035ac73cc4e59e3 \ - && echo '8dc2e7f5c4e634437e6641f4df77a18bf59f0c8e9016c8ba4be5d4a0111e68c2 */home/build/usign/1035ac73cc4e59e3' | sha256sum --check - -# untrusted comment: Public usign key for 19.07 release builds -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=usign/f94b9dd6febac963' --create-dirs -o /home/build/usign/f94b9dd6febac963 \ - && echo 'b1d09457cfbc36fccfe18382d65c54a2ade3e7fd3902da490a53aa517b512755 */home/build/usign/f94b9dd6febac963' | sha256sum --check - -# untrusted comment: Public usign key for 21.02 release builds -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=usign/2f8b0b98e08306bf' --create-dirs -o /home/build/usign/2f8b0b98e08306bf \ - && echo 'd102bdd75421c62490b97f520f9db06aadb44ad408b244755d26e96ea5cd3b7f */home/build/usign/2f8b0b98e08306bf' | sha256sum --check - -# untrusted comment: Public usign key for 22.03 release builds -RUN curl 'https://git.openwrt.org/?p=keyring.git;a=blob_plain;f=usign/4d017e6f1ed5d616' --create-dirs -o /home/build/usign/4d017e6f1ed5d616 \ - && echo 'f3c5fdf447d7c2743442e68077d60acc7c3e91754849e1f4b6be837b4204b7e2 */home/build/usign/4d017e6f1ed5d616' | sha256sum --check diff --git a/.circleci/README b/.circleci/README deleted file mode 100644 index 8626412c7b..0000000000 --- a/.circleci/README +++ /dev/null @@ -1,6 +0,0 @@ -# Build/update the docker image - -docker pull debian:10 -docker build --rm -t docker.io/openwrtorg/packages-cci:latest . 
-docker tag docker.io/openwrtorg/packages-cci: -docker push docker.io/openwrtorg/packages-cci diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 305813734c..0000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,182 +0,0 @@ -version: 2.0 -jobs: - build: - docker: - - image: docker.io/openwrtorg/packages-cci:v1.0.7 - environment: - - SDK_HOST: "downloads.openwrt.org" - - SDK_PATH: "snapshots/targets/ath79/generic" - - SDK_FILE: "openwrt-sdk-ath79-generic_*.Linux-x86_64.tar.xz" - - BRANCH: "master" - steps: - - checkout: - path: ~/openwrt_packages - - - run: - name: Check changes / verify commits - working_directory: ~/openwrt_packages - command: | - cat >> $BASH_ENV <: ' ($subject)" - RET=1 - fi - - body="$(git show -s --format=%b $commit)" - sob="$(git show -s --format='Signed-off-by: %aN <%aE>' $commit)" - if echo "$body" | grep -qF "$sob"; then - echo_green "Signed-off-by match author" - else - echo_red "Signed-off-by is missing or doesn't match author (should be '$sob')" - RET=1 - fi - done - - exit $RET - - - run: - name: Download the SDK - working_directory: ~/sdk - command: | - curl "https://$SDK_HOST/$SDK_PATH/sha256sums" -sS -o sha256sums - curl "https://$SDK_HOST/$SDK_PATH/sha256sums.asc" -fs -o sha256sums.asc || true - curl "https://$SDK_HOST/$SDK_PATH/sha256sums.sig" -fs -o sha256sums.sig || true - if [ ! -f sha256sums.asc ] && [ ! -f sha256sums.sig ]; then - echo_red "Missing sha256sums signature files" - exit 1 - fi - [ ! -f sha256sums.asc ] || gpg --with-fingerprint --verify sha256sums.asc sha256sums - if [ -f sha256sums.sig ]; then - VERIFIED= - for KEY in ~/usign/*; do - echo "Trying $KEY..." - if signify-openbsd -V -q -p "$KEY" -x sha256sums.sig -m sha256sums; then - echo "...verified" - VERIFIED=1 - break - fi - done - if [ -z "$VERIFIED" ]; then - echo_red "Could not verify usign signature" - exit 1 - fi - fi - rsync -av "$SDK_HOST::downloads/$SDK_PATH/$SDK_FILE" . - sha256sum -c --ignore-missing sha256sums - - - run: - name: Prepare build_dir - working_directory: ~/build_dir - command: | - tar Jxf ~/sdk/$SDK_FILE --strip=1 - touch .config - make prepare-tmpinfo scripts/config/conf - ./scripts/config/conf --defconfig=.config Config.in - make prereq - rm .config - cat > feeds.conf < /dev/null - make defconfig > /dev/null - # enable BUILD_LOG - sed -i 's/# CONFIG_BUILD_LOG is not set/CONFIG_BUILD_LOG=y/' .config - - - run: - name: Install & download source, check package, compile - working_directory: ~/build_dir - command: | - set +o pipefail - PKGS=$(cd ~/openwrt_packages; git diff --diff-filter=d --name-only "origin/$BRANCH..." | grep 'Makefile$' | grep -Ev '/files/|/src/' | awk -F/ '{ print $(NF-1) }') - if [ -z "$PKGS" ] ; then - echo_blue "WARNING: No new or modified packages found!" 
- exit 0 - fi - - echo_blue "=== Found new/modified packages: $PKGS" - for PKG in $PKGS ; do - echo_blue "===+ Install: $PKG" - ./scripts/feeds install "$PKG" - - echo_blue "===+ Download: $PKG" - make "package/$PKG/download" V=s - - echo_blue "===+ Check package: $PKG" - make "package/$PKG/check" V=s 2>&1 | tee logtmp - RET=${PIPESTATUS[0]} - - if [ $RET -ne 0 ]; then - echo_red "=> Package check failed: $RET)" - exit $RET - fi - - badhash_msg="HASH does not match " - badhash_msg+="|HASH uses deprecated hash," - badhash_msg+="|HASH is missing," - if grep -qE "$badhash_msg" logtmp; then - echo_red "=> Package HASH check failed" - exit 1 - fi - echo_green "=> Package check OK" - done - - make \ - -f .config \ - -f tmp/.packagedeps \ - -f <(echo '$(info $(sort $(package-y) $(package-m)))'; echo -en 'a:\n\t@:') \ - | tr ' ' '\n' >enabled-package-subdirs.txt - for PKG in $PKGS ; do - if ! grep -m1 -qE "(^|/)$PKG$" enabled-package-subdirs.txt; then - echo_red "===+ Building: $PKG skipped. It cannot be enabled with $SDK_FILE" - continue - fi - echo_blue "===+ Building: $PKG" - make "package/$PKG/compile" -j3 V=s || { - RET=$? - echo_red "===+ Building: $PKG failed, rebuilding with -j1 for human readable error log" - make "package/$PKG/compile" -j1 V=s; exit $RET - } - done - - - store_artifacts: - path: ~/build_dir/logs - - - store_artifacts: - path: ~/build_dir/bin - -workflows: - version: 2 - buildpr: - jobs: - - build: - filters: - branches: - ignore: master From 9d49df0dabcdd9135bf0b86374695b69cb4bf5b6 Mon Sep 17 00:00:00 2001 From: Josef Schlehofer Date: Fri, 26 Apr 2024 11:24:57 +0200 Subject: [PATCH 04/20] syslog-ng: update to version 4.7.1 Release notes: - https://github.com/syslog-ng/syslog-ng/releases/tag/syslog-ng-4.7.0 - https://github.com/syslog-ng/syslog-ng/releases/tag/syslog-ng-4.7.1 Also bump version in the config file to avoid warning Signed-off-by: Josef Schlehofer --- admin/syslog-ng/Makefile | 6 +++--- admin/syslog-ng/files/syslog-ng.conf | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/admin/syslog-ng/Makefile b/admin/syslog-ng/Makefile index 13b7fa0174..f366dcfcc1 100644 --- a/admin/syslog-ng/Makefile +++ b/admin/syslog-ng/Makefile @@ -1,8 +1,8 @@ include $(TOPDIR)/rules.mk PKG_NAME:=syslog-ng -PKG_VERSION:=4.6.0 -PKG_RELEASE:=2 +PKG_VERSION:=4.7.1 +PKG_RELEASE:=1 PKG_MAINTAINER:=Josef Schlehofer PKG_LICENSE:=LGPL-2.1-or-later GPL-2.0-or-later @@ -11,7 +11,7 @@ PKG_CPE_ID:=cpe:/a:balabit:syslog-ng PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE_URL:=https://github.com/syslog-ng/syslog-ng/releases/download/$(PKG_NAME)-$(PKG_VERSION)/ -PKG_HASH:=b69e3360dfb96a754a4e1cbead4daef37128b1152a23572356db4ab64a475d4f +PKG_HASH:=5477189a2d12325aa4faebfcf59f5bdd9084234732f0c3ec16dd253847dacf1c PKG_BUILD_PARALLEL:=1 PKG_INSTALL:=1 diff --git a/admin/syslog-ng/files/syslog-ng.conf b/admin/syslog-ng/files/syslog-ng.conf index ca7b0972a8..dafef7b7e4 100644 --- a/admin/syslog-ng/files/syslog-ng.conf +++ b/admin/syslog-ng/files/syslog-ng.conf @@ -1,7 +1,7 @@ # Collect all local logs into a single file /var/log/messages. # See https://www.syslog-ng.com/technical-documents/list/syslog-ng-open-source-edition -@version: 4.6 +@version: 4.7 @include "scl.conf" options { From 2c6d5adac049a55ca067255da90dc938b5604249 Mon Sep 17 00:00:00 2001 From: Dirk Brenken Date: Fri, 26 Apr 2024 17:03:14 +0200 Subject: [PATCH 05/20] banip: update 0.9.5-3 * allow multiple protocol/port definitions per feed, e.g. 
'tcp udp 80 443 50000' * removed the default protocol/port limitation from asn feed Signed-off-by: Dirk Brenken --- net/banip/Makefile | 2 +- net/banip/files/README.md | 8 ++--- net/banip/files/banip-functions.sh | 50 ++++++++++++++++++------------ net/banip/files/banip.feeds | 3 +- 4 files changed, 37 insertions(+), 26 deletions(-) diff --git a/net/banip/Makefile b/net/banip/Makefile index 14636f1b81..41f01195a4 100644 --- a/net/banip/Makefile +++ b/net/banip/Makefile @@ -6,7 +6,7 @@ include $(TOPDIR)/rules.mk PKG_NAME:=banip PKG_VERSION:=0.9.5 -PKG_RELEASE:=2 +PKG_RELEASE:=3 PKG_LICENSE:=GPL-3.0-or-later PKG_MAINTAINER:=Dirk Brenken diff --git a/net/banip/files/README.md b/net/banip/files/README.md index a28067e84e..a29375bbf3 100644 --- a/net/banip/files/README.md +++ b/net/banip/files/README.md @@ -15,7 +15,7 @@ IP address blocking is commonly used to protect against brute force attacks, pre | adguard | adguard IPs | | | x | tcp: 80, 443 | [Link](https://github.com/dibdot/banIP-IP-blocklists) | | adguardtrackers | adguardtracker IPs | | | x | tcp: 80, 443 | [Link](https://github.com/dibdot/banIP-IP-blocklists) | | antipopads | antipopads IPs | | | x | tcp: 80, 443 | [Link](https://github.com/dibdot/banIP-IP-blocklists) | -| asn | ASN segments | | | x | tcp: 80, 443 | [Link](https://asn.ipinfo.app) | +| asn | ASN segments | x | x | x | | [Link](https://asn.ipinfo.app) | | backscatterer | backscatterer IPs | x | x | | | [Link](https://www.uceprotect.net/en/index.php) | | becyber | malicious attacker IPs | x | x | | | [Link](https://github.com/duggytuxy/malicious_ip_addresses) | | binarydefense | binary defense banlist | x | x | | | [Link](https://iplists.firehol.org/?ipset=bds_atif) | @@ -114,7 +114,7 @@ IP address blocking is commonly used to protect against brute force attacks, pre * It's strongly recommended to use the LuCI frontend to easily configure all aspects of banIP, the application is located in LuCI under the 'Services' menu * If you're using a complex network setup, e.g. special tunnel interfaces, than untick the 'Auto Detection' option under the 'General Settings' tab and set the required options manually * Start the service with '/etc/init.d/banip start' and check everything is working by running '/etc/init.d/banip status' and also check the 'Firewall Log' and 'Processing Log' tabs -* If you're going to configure banIP via CLI, edit the config file '/etc/config/banip' and enable the service (set ban\_enabled to '1'), then add pre-configured feeds via 'ban\_feed' (see the feed list above) and add/change other options to your needs (see the options reference below) +* If you're going to configure banIP via CLI, edit the config file '/etc/config/banip' and enable the service (set ban\_enabled to '1'), then add pre-configured feeds via 'ban\_feed' (see the feed list above) and add/change other options to your needs, see the options reference table below ## banIP CLI interface * All important banIP functions are accessible via CLI. @@ -428,12 +428,12 @@ A valid JSON source object contains the following information, e.g.: "rule_4": "/^(([0-9]{1,3}\\.){3}(1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])(\\/(1?[0-9]|2?[0-9]|3?[0-2]))?)$/{printf \"%s,\\n\",$1}", "rule_6": "/^(([0-9A-f]{0,4}:){1,7}[0-9A-f]{0,4}:?(\\/(1?[0-2][0-8]|[0-9][0-9]))?)$/{printf \"%s,\\n\",$1}", "descr": "tor exit nodes", - "flag": "tcp 80-89 443" + "flag": "gz tcp 80-88 udp 50000" }, [...] 
``` Add an unique feed name (no spaces, no special chars) and make the required changes: adapt at least the URL, the regex and the description for a new feed. -Please note: the flag field is optional, it's a space separated list of options: supported are 'gz' as an archive format, protocols 'tcp' or 'udp' with port numbers/port ranges for destination port limitations. +Please note: the flag field is optional, it's a space separated list of options: supported are 'gz' as an archive format, protocols 'tcp' or 'udp' with port numbers/port ranges for destination port limitations - multiple definitions are possible. ## Support Please join the banIP discussion in this [forum thread](https://forum.openwrt.org/t/banip-support-thread/16985) or contact me by mail diff --git a/net/banip/files/banip-functions.sh b/net/banip/files/banip-functions.sh index 50e805b5a4..b5c9b47745 100644 --- a/net/banip/files/banip-functions.sh +++ b/net/banip/files/banip-functions.sh @@ -595,24 +595,30 @@ f_etag() { # build initial nft file with base table, chains and rules # f_nftinit() { - local wan_dev vlan_allow vlan_block log_ct log_icmp log_syn log_udp log_tcp feed_log feed_rc allow_proto allow_dport flag file="${1}" + local wan_dev vlan_allow vlan_block log_ct log_icmp log_syn log_udp log_tcp feed_log feed_rc flag tmp_proto tmp_port allow_dport file="${1}" wan_dev="$(printf "%s" "${ban_dev}" | "${ban_sedcmd}" 's/^/\"/;s/$/\"/;s/ /\", \"/g')" [ -n "${ban_vlanallow}" ] && vlan_allow="$(printf "%s" "${ban_vlanallow%%?}" | "${ban_sedcmd}" 's/^/\"/;s/$/\"/;s/ /\", \"/g')" [ -n "${ban_vlanblock}" ] && vlan_block="$(printf "%s" "${ban_vlanblock%%?}" | "${ban_sedcmd}" 's/^/\"/;s/$/\"/;s/ /\", \"/g')" for flag in ${ban_allowflag}; do - if [ -z "${allow_proto}" ] && { [ "${flag}" = "tcp" ] || [ "${flag}" = "udp" ]; }; then - allow_proto="${flag}" - elif [ -n "${allow_proto}" ] && [ -n "${flag//[![:digit]-]/}" ] && ! printf "%s" "${allow_dport}" | "${ban_grepcmd}" -qw "${flag}"; then - if [ -z "${allow_dport}" ]; then - allow_dport="${flag}" - else - allow_dport="${allow_dport}, ${flag}" + if [ "${flag}" = "tcp" ] || [ "${flag}" = "udp" ]; then + if [ -z "${tmp_proto}" ]; then + tmp_proto="${flag}" + elif ! printf "%s" "${tmp_proto}" | "${ban_grepcmd}" -qw "${flag}"; then + tmp_proto="${tmp_proto}, ${flag}" + fi + elif [ -n "${flag//[![:digit]-]/}" ]; then + if [ -z "${tmp_port}" ]; then + tmp_port="${flag}" + elif ! printf "%s" "${tmp_port}" | "${ban_grepcmd}" -qw "${flag}"; then + tmp_port="${tmp_port}, ${flag}" fi fi done - [ -n "${allow_dport}" ] && allow_dport="${allow_proto} dport { ${allow_dport} }" + if [ -n "${tmp_proto}" ] && [ -n "${tmp_port}" ]; then + allow_dport="meta l4proto { ${tmp_proto} } th dport { ${tmp_port} }" + fi if [ "${ban_logprerouting}" = "1" ]; then log_icmp="log level ${ban_nftloglevel} prefix \"banIP/pre-icmp/drop: \"" @@ -697,7 +703,7 @@ f_nftinit() { # f_down() { local log_input log_forwardwan log_forwardlan start_ts end_ts tmp_raw tmp_load tmp_file split_file ruleset_raw handle rc etag_rc - local expr cnt_set cnt_dl restore_rc feed_direction feed_rc feed_log feed_comp feed_proto feed_dport feed_target + local expr cnt_set cnt_dl restore_rc feed_direction feed_rc feed_log feed_comp feed_target feed_dport tmp_proto tmp_port flag local feed="${1}" proto="${2}" feed_url="${3}" feed_rule="${4}" feed_flag="${5}" start_ts="$(date +%s)" @@ -756,19 +762,25 @@ f_down() { # prepare feed flags # for flag in ${feed_flag}; do - if [ "${flag}" = "gz" ] && ! 
printf "%s" "${feed_comp}" | "${ban_grepcmd}" -qw "${flag}"; then + if [ "${flag}" = "gz" ]; then feed_comp="${flag}" - elif [ -z "${feed_proto}" ] && { [ "${flag}" = "tcp" ] || [ "${flag}" = "udp" ]; }; then - feed_proto="${flag}" - elif [ -n "${feed_proto}" ] && [ -n "${flag//[![:digit]-]/}" ] && ! printf "%s" "${feed_dport}" | "${ban_grepcmd}" -qw "${flag}"; then - if [ -z "${feed_dport}" ]; then - feed_dport="${flag}" - else - feed_dport="${feed_dport}, ${flag}" + elif [ "${flag}" = "tcp" ] || [ "${flag}" = "udp" ]; then + if [ -z "${tmp_proto}" ]; then + tmp_proto="${flag}" + elif ! printf "%s" "${tmp_proto}" | "${ban_grepcmd}" -qw "${flag}"; then + tmp_proto="${tmp_proto}, ${flag}" + fi + elif [ -n "${flag//[![:digit]-]/}" ]; then + if [ -z "${tmp_port}" ]; then + tmp_port="${flag}" + elif ! printf "%s" "${tmp_port}" | "${ban_grepcmd}" -qw "${flag}"; then + tmp_port="${tmp_port}, ${flag}" fi fi done - [ -n "${feed_dport}" ] && feed_dport="${feed_proto} dport { ${feed_dport} }" + if [ -n "${tmp_proto}" ] && [ -n "${tmp_port}" ]; then + feed_dport="meta l4proto { ${tmp_proto} } th dport { ${tmp_port} }" + fi # chain/rule maintenance # diff --git a/net/banip/files/banip.feeds b/net/banip/files/banip.feeds index 36982654ba..90eaf62adc 100644 --- a/net/banip/files/banip.feeds +++ b/net/banip/files/banip.feeds @@ -36,8 +36,7 @@ "url_6": "https://asn.ipinfo.app/api/text/list/", "rule_4": "/^(([0-9]{1,3}\\.){3}(1?[0-9][0-9]?|2[0-4][0-9]|25[0-5])(\\/(1?[0-9]|2?[0-9]|3?[0-2]))?)$/{printf \"%s,\\n\",$1}", "rule_6": "/^(([0-9A-f]{0,4}:){1,7}[0-9A-f]{0,4}:?(\\/(1?[0-2][0-8]|[0-9][0-9]))?)$/{printf \"%s,\\n\",$1}", - "descr": "ASN IP segments", - "flag": "tcp 80 443" + "descr": "ASN IP segments" }, "backscatterer":{ "url_4": "http://wget-mirrors.uceprotect.net/rbldnsd-all/ips.backscatterer.org.gz", From 4628b6bd43ac1c212363535fe4d32739c5f7e622 Mon Sep 17 00:00:00 2001 From: Paul Donald Date: Sun, 31 Mar 2024 20:25:17 +0200 Subject: [PATCH 06/20] p910nd: set bidi only if not already set Closes #23774 Signed-off-by: Paul Donald --- net/p910nd/Makefile | 2 +- net/p910nd/files/p910nd.hotplug | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/net/p910nd/Makefile b/net/p910nd/Makefile index c8934779a0..3e866590ce 100644 --- a/net/p910nd/Makefile +++ b/net/p910nd/Makefile @@ -9,7 +9,7 @@ include $(TOPDIR)/rules.mk PKG_NAME:=p910nd PKG_VERSION:=0.97 -PKG_RELEASE:=13 +PKG_RELEASE:=14 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 PKG_SOURCE_URL:=@SF/p910nd diff --git a/net/p910nd/files/p910nd.hotplug b/net/p910nd/files/p910nd.hotplug index 3440a86ed8..2bebaee845 100644 --- a/net/p910nd/files/p910nd.hotplug +++ b/net/p910nd/files/p910nd.hotplug @@ -281,8 +281,7 @@ get_and_store_printer_info() { [ "$DEBUG" ] && echo ${MFG:+MFG=$MFG} ${MDL:+MDL=$MDL} ${CMD:+CMD=$CMD} ${CLS:+CLS=$CLS} ${DES:+DES=$DES} ${SN:+SN=$SN} [ "$DEBUG" ] && echo 'uci set' for UCI_DEV_CFG_NUMBER: $UCI_DEV_CFG_NUMBER - # Take the USB info as fact: set bidir regardless. It seems to be a source of confusion. 
- eval "$uqsddu_cmd.bidirectional='$BIDIR'" + [ -z "$(eval "$uqgddu_cmd".bidirectional)" ] && eval "$uqsddu_cmd.bidirectional='$BIDIR'" [ -z "$(eval "$uqgddu_cmd".port)" ] && eval "$uqsddu_cmd.port='0'" [ -z "$(eval "$uqgddu_cmd".enabled)" ] && eval "$uqsddu_cmd.enabled='1'" [ -z "$(eval "$uqgddu_cmd".usbvidpid)" ] && [ -n "$THIS_USB_VIDPID" ] && eval "$uqsddu_cmd.usbvidpid='$THIS_USB_VIDPID'" From 06ea66c55866aa409ab567a593a22bd24e727f04 Mon Sep 17 00:00:00 2001 From: Nathan Friedly Date: Thu, 25 Apr 2024 13:19:33 -0400 Subject: [PATCH 07/20] librespeed-go: improve the description This swaps the order of the lines in the description so that when LuCI displays only the first line, it still offers some helpful information. Signed-off-by: Nathan Friedly --- net/librespeed-go/Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/net/librespeed-go/Makefile b/net/librespeed-go/Makefile index 984370e64b..0b5f4e2793 100644 --- a/net/librespeed-go/Makefile +++ b/net/librespeed-go/Makefile @@ -6,7 +6,7 @@ include $(TOPDIR)/rules.mk PKG_NAME:=librespeed-go PKG_VERSION:=1.1.5 -PKG_RELEASE:=3 +PKG_RELEASE:=4 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE_URL:=https://codeload.github.com/librespeed/speedtest-go/tar.gz/v$(PKG_VERSION)? @@ -39,10 +39,10 @@ define Package/librespeed-go endef define Package/librespeed-go/description - No Flash, No Java, No WebSocket, No Bullshit. - This is a very lightweight speed test implemented in JavaScript, using XMLHttpRequest and Web Workers. + + No Flash, No Java, No WebSocket, No Bullshit. endef define Package/librespeed-go/conffiles From 462bfd84e484da313b3f096cc4dbaec54c8c94a1 Mon Sep 17 00:00:00 2001 From: Tianling Shen Date: Sat, 27 Apr 2024 13:18:55 +0800 Subject: [PATCH 08/20] cloudflared: Update to 2024.4.1 Signed-off-by: Tianling Shen --- net/cloudflared/Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/net/cloudflared/Makefile b/net/cloudflared/Makefile index f65f9eaa6b..b49ba6282f 100644 --- a/net/cloudflared/Makefile +++ b/net/cloudflared/Makefile @@ -5,12 +5,12 @@ include $(TOPDIR)/rules.mk PKG_NAME:=cloudflared -PKG_VERSION:=2024.4.0 +PKG_VERSION:=2024.4.1 PKG_RELEASE:=1 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz PKG_SOURCE_URL:=https://codeload.github.com/cloudflare/cloudflared/tar.gz/$(PKG_VERSION)? -PKG_HASH:=a68882beb5ec2855a17253a751295c4cc4f8f9ca3b49920ffa7e398995f85055 +PKG_HASH:=11bed2bd793cc03775aa6270797ed328434bc982e09fd3597e267590f28d2436 PKG_LICENSE:=Apache-2.0 PKG_LICENSE_FILES:=LICENSE From a314f26e991fe5907820f29a8dfe0a80a48b48ba Mon Sep 17 00:00:00 2001 From: Christian Marangi Date: Tue, 7 Nov 2023 01:17:25 +0100 Subject: [PATCH 09/20] snort3: add patch and move to PCRE2 Add experimental patch and move package to PCRE2 as PCRE is EOL and won't receive any security updates anymore. 
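
For reference, a minimal sketch of the PCRE2 call sequence the bundled patch
migrates to (illustration only, not part of the patch itself; assumes the
libpcre2-8 headers are installed): pcre2_compile()/pcre2_match() replace
pcre_compile()/pcre_exec(), a pcre2_match_data object replaces the
caller-supplied ovector, and a pcre2_match_context carries the match limits
that previously lived in pcre_extra.

```c
#define PCRE2_CODE_UNIT_WIDTH 8
#include <pcre2.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    int errorcode;
    PCRE2_SIZE erroffset;

    /* compile: the pattern length is passed explicitly (or PCRE2_ZERO_TERMINATED) */
    pcre2_code *re = pcre2_compile((PCRE2_SPTR)"ab+c", PCRE2_ZERO_TERMINATED,
                                   PCRE2_CASELESS, &errorcode, &erroffset, NULL);
    if (!re) {
        PCRE2_UCHAR err[128];
        pcre2_get_error_message(errorcode, err, sizeof(err));
        fprintf(stderr, "compile failed at offset %zu: %s\n", erroffset, (const char *)err);
        return 1;
    }

    /* match context replaces the pcre_extra match-limit fields */
    pcre2_match_context *mctx = pcre2_match_context_create(NULL);
    pcre2_set_match_limit(mctx, 1500);

    /* match data replaces the caller-supplied ovector array */
    pcre2_match_data *md = pcre2_match_data_create_from_pattern(re, NULL);

    const char *subject = "xxABBBCxx";
    int rc = pcre2_match(re, (PCRE2_SPTR)subject, strlen(subject),
                         0 /* start offset */, 0 /* options */, md, mctx);
    if (rc > 0) {
        PCRE2_SIZE *ov = pcre2_get_ovector_pointer(md);
        printf("matched at [%zu, %zu)\n", ov[0], ov[1]);
    }

    pcre2_match_data_free(md);
    pcre2_match_context_free(mctx);
    pcre2_code_free(re);
    return 0;
}
```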
Signed-off-by: Christian Marangi --- net/snort3/Makefile | 2 +- .../900-core-convert-project-to-PCRE2.patch | 2052 +++++++++++++++++ 2 files changed, 2053 insertions(+), 1 deletion(-) create mode 100644 net/snort3/patches/900-core-convert-project-to-PCRE2.patch diff --git a/net/snort3/Makefile b/net/snort3/Makefile index b9c85922b0..48ce85b456 100644 --- a/net/snort3/Makefile +++ b/net/snort3/Makefile @@ -29,7 +29,7 @@ define Package/snort3 DEPENDS:= \ +(TARGET_x86||TARGET_x86_64):hyperscan-runtime \ +(TARGET_x86||TARGET_x86_64):gperftools-runtime \ - +libstdcpp +libdaq3 +libdnet +libopenssl +libpcap +libpcre +libpthread \ + +libstdcpp +libdaq3 +libdnet +libopenssl +libpcap +libpcre2 +libpthread \ +libuuid +zlib +libhwloc +libtirpc @HAS_LUAJIT_ARCH +luajit +libatomic \ +kmod-nft-queue +liblzma +ucode +ucode-mod-fs +ucode-mod-uci TITLE:=Lightweight Network Intrusion Detection System diff --git a/net/snort3/patches/900-core-convert-project-to-PCRE2.patch b/net/snort3/patches/900-core-convert-project-to-PCRE2.patch new file mode 100644 index 0000000000..01199f87cd --- /dev/null +++ b/net/snort3/patches/900-core-convert-project-to-PCRE2.patch @@ -0,0 +1,2052 @@ +From a71cca137eb33f659354ce0ebda4951cb26485df Mon Sep 17 00:00:00 2001 +From: Christian Marangi +Date: Mon, 6 Nov 2023 22:43:59 +0100 +Subject: [PATCH] core: convert project to PCRE2 + +Convert project to PCRE2 as PCRE is EOL and won't receive any security +updates anymore. + +PCRE2 changed some API and concept. Mainly there isn't the concept of +study anymore, replaced by match_context concept and match_data is used +instead of ovector to handle results. Because of this the scratcher is +not needed anymore and is replaced by a simple function to setup the max +ovector size on end module init. + +Signed-off-by: Christian Marangi +--- + README.md | 17 +- + cmake/FindPCRE.cmake | 32 -- + cmake/FindPCRE2.cmake | 32 ++ + cmake/create_pkg_config.cmake | 4 +- + cmake/include_libraries.cmake | 2 +- + configure_cmake.sh | 16 +- + lua/balanced.lua | 2 +- + lua/max_detect.lua | 6 +- + lua/security.lua | 4 +- + snort.pc.in | 4 +- + src/CMakeLists.txt | 4 +- + src/detection/detection_module.cc | 48 +-- + src/detection/detection_options.cc | 6 +- + src/ips_options/ips_options.cc | 4 +- + src/ips_options/ips_pcre.cc | 391 ++++++++---------- + src/main/shell.cc | 9 +- + src/main/snort_config.h | 26 +- + .../appid/lua_detector_api.cc | 62 +-- + src/parser/parse_rule.cc | 4 +- + src/parser/parse_stream.cc | 2 +- + src/search_engines/test/hyperscan_test.cc | 2 +- + src/utils/stats.cc | 6 +- + src/utils/stats.h | 6 +- + src/utils/util.cc | 8 +- + tools/snort2lua/config_states/config_api.cc | 12 +- + .../config_states/config_no_option.cc | 14 +- + .../config_states/config_one_int_option.cc | 24 +- + tools/snort2lua/rule_states/CMakeLists.txt | 2 +- + tools/snort2lua/rule_states/rule_api.cc | 4 +- + .../{rule_pcre.cc => rule_pcre2.cc} | 40 +- + .../snort2lua/rule_states/rule_sd_pattern.cc | 4 +- + 31 files changed, 393 insertions(+), 404 deletions(-) + delete mode 100644 cmake/FindPCRE.cmake + create mode 100644 cmake/FindPCRE2.cmake + rename tools/snort2lua/rule_states/{rule_pcre.cc => rule_pcre2.cc} (80%) + +--- a/README.md ++++ b/README.md +@@ -8,13 +8,14 @@ topics: + + --- + +-* [Overview](#overview) +-* [Dependencies](#dependencies) +-* [Download](#download) +-* [Build Snort](#build-snort) +-* [Run Snort](#run-snort) +-* [Documentation](#documentation) +-* [Squeal](#squeal) ++- [Snort++](#snort) ++- [OVERVIEW](#overview) ++- 
[DEPENDENCIES](#dependencies) ++- [DOWNLOAD](#download) ++- [BUILD SNORT](#build-snort) ++- [RUN SNORT](#run-snort) ++- [DOCUMENTATION](#documentation) ++- [SQUEAL](#squeal) + + # OVERVIEW + +@@ -61,7 +62,7 @@ the latest: + * OpenSSL from https://www.openssl.org/source/ for SHA and MD5 file signatures, + the protected_content rule option, and SSL service detection + * pcap from http://www.tcpdump.org for tcpdump style logging +-* pcre from http://www.pcre.org for regular expression pattern matching ++* pcre2 from http://www.pcre.org for regular expression pattern matching + * pkgconfig from https://www.freedesktop.org/wiki/Software/pkg-config/ to locate build dependencies + * zlib from http://www.zlib.net for decompression + +--- a/cmake/FindPCRE.cmake ++++ /dev/null +@@ -1,32 +0,0 @@ +-# - Find pcre +-# Find the native PCRE includes and library +-# +-# PCRE_INCLUDE_DIR - where to find pcre.h, etc. +-# PCRE_LIBRARIES - List of libraries when using pcre. +-# PCRE_FOUND - True if pcre found. +- +-set(ERROR_MESSAGE +- "\n\tERROR! Libpcre library not found. +- \tGet it from http://www.pcre.org\n" +-) +- +-find_package(PkgConfig) +-pkg_check_modules(PC_PCRE libpcre) +- +-# Use PCRE_INCLUDE_DIR_HINT and PCRE_LIBRARIES_DIR_HINT from configure_cmake.sh as primary hints +-# and then package config information after that. +-find_path(PCRE_INCLUDE_DIR pcre.h +- HINTS ${PCRE_INCLUDE_DIR_HINT} ${PC_PCRE_INCLUDEDIR} ${PC_PCRE_INCLUDE_DIRS}) +-find_library(PCRE_LIBRARIES NAMES pcre +- HINTS ${PCRE_LIBRARIES_DIR_HINT} ${PC_PCRE_LIBDIR} ${PC_PCRE_LIBRARY_DIRS}) +- +-include(FindPackageHandleStandardArgs) +-find_package_handle_standard_args(PCRE +- REQUIRED_VARS PCRE_INCLUDE_DIR PCRE_LIBRARIES +- FAIL_MESSAGE "${ERROR_MESSAGE}" +-) +- +-mark_as_advanced( +- PCRE_LIBRARIES +- PCRE_INCLUDE_DIR +-) +--- /dev/null ++++ b/cmake/FindPCRE2.cmake +@@ -0,0 +1,32 @@ ++# - Find pcre2 ++# Find the native PCRE2 includes and library ++# ++# PCRE2_INCLUDE_DIR - where to find pcre2.h, etc. ++# PCRE2_LIBRARIES - List of libraries when using pcre2. ++# PCRE2_FOUND - True if pcre2 found. ++ ++set(ERROR_MESSAGE ++ "\n\tERROR! Libpcre2 library not found. ++ \tGet it from http://www.pcre.org\n" ++) ++ ++find_package(PkgConfig) ++pkg_check_modules(PC_PCRE2 libpcre2-8) ++ ++# Use PCRE2_INCLUDE_DIR_HINT and PCRE_LIBRARIES_DIR_HINT from configure_cmake.sh as primary hints ++# and then package config information after that. 
++find_path(PCRE2_INCLUDE_DIR pcre2.h ++ HINTS ${PCRE2_INCLUDE_DIR_HINT} ${PC_PCRE2_INCLUDEDIR} ${PC_PCRE2_INCLUDE_DIRS}) ++find_library(PCRE2_LIBRARIES NAMES pcre2-8 ++ HINTS ${PCRE2_LIBRARIES_DIR_HINT} ${PC_PCRE2_LIBDIR} ${PC_PCRE2_LIBRARY_DIRS}) ++ ++include(FindPackageHandleStandardArgs) ++find_package_handle_standard_args(PCRE2-8 ++ REQUIRED_VARS PCRE2_INCLUDE_DIR PCRE2_LIBRARIES ++ FAIL_MESSAGE "${ERROR_MESSAGE}" ++) ++ ++mark_as_advanced( ++ PCRE2_LIBRARIES ++ PCRE2_INCLUDE_DIR ++) +--- a/cmake/create_pkg_config.cmake ++++ b/cmake/create_pkg_config.cmake +@@ -72,8 +72,8 @@ if(PCAP_INCLUDE_DIR) + set(PCAP_CPPFLAGS "-I${PCAP_INCLUDE_DIR}") + endif() + +-if(PCRE_INCLUDE_DIR) +- set(PCRE_CPPFLAGS "-I${PCRE_INCLUDE_DIR}") ++if(PCRE2_INCLUDE_DIR) ++ set(PCRE2_CPPFLAGS "-I${PCRE2_INCLUDE_DIR}") + endif() + + if(UUID_INCLUDE_DIR) +--- a/cmake/include_libraries.cmake ++++ b/cmake/include_libraries.cmake +@@ -8,7 +8,7 @@ find_package(HWLOC REQUIRED) + find_package(LuaJIT REQUIRED) + find_package(OpenSSL 1.1.1 REQUIRED) + find_package(PCAP REQUIRED) +-find_package(PCRE REQUIRED) ++find_package(PCRE2 REQUIRED) + find_package(ZLIB REQUIRED) + if (ENABLE_UNIT_TESTS) + find_package(CppUTest REQUIRED) +--- a/configure_cmake.sh ++++ b/configure_cmake.sh +@@ -90,10 +90,10 @@ Optional Packages: + luajit include directory + --with-luajit-libraries=DIR + luajit library directory +- --with-pcre-includes=DIR +- libpcre include directory +- --with-pcre-libraries=DIR +- libpcre library directory ++ --with-pcre2-includes=DIR ++ libpcre2 include directory ++ --with-pcre2-libraries=DIR ++ libpcre2 library directory + --with-dnet-includes=DIR + libdnet include directory + --with-dnet-libraries=DIR +@@ -417,11 +417,11 @@ while [ $# -ne 0 ]; do + --with-luajit-libraries=*) + append_cache_entry LUAJIT_LIBRARIES_DIR_HINT PATH $optarg + ;; +- --with-pcre-includes=*) +- append_cache_entry PCRE_INCLUDE_DIR_HINT PATH $optarg ++ --with-pcre2-includes=*) ++ append_cache_entry PCRE2_INCLUDE_DIR_HINT PATH $optarg + ;; +- --with-pcre-libraries=*) +- append_cache_entry PCRE_LIBRARIES_DIR_HINT PATH $optarg ++ --with-pcre2-libraries=*) ++ append_cache_entry PCRE2_LIBRARIES_DIR_HINT PATH $optarg + ;; + --with-dnet-includes=*) + append_cache_entry DNET_INCLUDE_DIR_HINT PATH $optarg +--- a/lua/balanced.lua ++++ b/lua/balanced.lua +@@ -5,7 +5,7 @@ + + arp_spoof = nil + +-detection = { pcre_override = false } ++detection = { pcre2_override = false } + + http_inspect.request_depth = 300 + http_inspect.response_depth = 500 +--- a/lua/max_detect.lua ++++ b/lua/max_detect.lua +@@ -10,13 +10,13 @@ ftp_server.check_encrypted = true + + detection = + { +- pcre_match_limit = 3500, +- pcre_match_limit_recursion = 3500, ++ pcre2_match_limit = 3500, ++ pcre2_match_limit_recursion = 3500, + + -- enable for hyperscan for best throughput + -- use multiple packet threads for fast startup + --hyperscan_literals = true, +- --pcre_to_regex = true ++ --pcre2_to_regex = true + } + + http_inspect.decompress_pdf = true +--- a/lua/security.lua ++++ b/lua/security.lua +@@ -9,8 +9,8 @@ ftp_server.check_encrypted = true + + detection = + { +- pcre_match_limit = 3500, +- pcre_match_limit_recursion = 3500 ++ pcre2_match_limit = 3500, ++ pcre2_match_limit_recursion = 3500 + } + + http_inspect.decompress_pdf = true +--- a/snort.pc.in ++++ b/snort.pc.in +@@ -9,7 +9,7 @@ mandir=@mandir@ + infodir=@infodir@ + + cpp_opts=DAQ LUAJIT +-cpp_opts_other=DNET HWLOC HYPERSCAN LZMA OPENSSL PCAP PCRE UUID ++cpp_opts_other=DNET HWLOC HYPERSCAN LZMA OPENSSL PCAP PCRE2 UUID + 
+ PCAP_CPPFLAGS=@PCAP_CPPFLAGS@ + LUAJIT_CPPFLAGS=@LUAJIT_CPPFLAGS@ +@@ -18,7 +18,7 @@ DAQ_CPPFLAGS=@DAQ_CPPFLAGS@ + FLEX_CPPFLAGS=@FLEX_CPPFLAGS@ + OPENSSL_CPPFLAGS=@OPENSSL_CPPFLAGS@ + HWLOC_CPPFLAGS=@HWLOC_CPPFLAGS@ +-PCRE_CPPFLAGS=@PCRE_CPPFLAGS@ ++PCRE2_CPPFLAGS=@PCRE2_CPPFLAGS@ + LZMA_CPPFLAGS=@LZMA_CPPFLAGS@ + HYPERSCAN_CPPFLAGS=@HYPERSCAN_CPPFLAGS@ + UUID_CPPFLAGS=@UUID_CPPFLAGS@ +--- a/src/CMakeLists.txt ++++ b/src/CMakeLists.txt +@@ -10,7 +10,7 @@ set(EXTERNAL_LIBRARIES + ${LUAJIT_LIBRARIES} + ${OPENSSL_CRYPTO_LIBRARY} + ${PCAP_LIBRARIES} +- ${PCRE_LIBRARIES} ++ ${PCRE2_LIBRARIES} + ${ZLIB_LIBRARIES} + ) + +@@ -21,7 +21,7 @@ set(EXTERNAL_INCLUDES + ${HWLOC_INCLUDE_DIRS} + ${OPENSSL_INCLUDE_DIR} + ${PCAP_INCLUDE_DIR} +- ${PCRE_INCLUDE_DIR} ++ ${PCRE2_INCLUDE_DIR} + ${ZLIB_INCLUDE_DIRS} + ) + +--- a/src/detection/detection_module.cc ++++ b/src/detection/detection_module.cc +@@ -96,21 +96,21 @@ static const Parameter detection_params[ + { "offload_threads", Parameter::PT_INT, "0:max32", "0", + "maximum number of simultaneous offloads (defaults to disabled)" }, + +- { "pcre_enable", Parameter::PT_BOOL, nullptr, "true", +- "enable pcre pattern matching" }, ++ { "pcre2_enable", Parameter::PT_BOOL, nullptr, "true", ++ "enable pcre2 pattern matching" }, + +- { "pcre_match_limit", Parameter::PT_INT, "0:max32", "1500", +- "limit pcre backtracking, 0 = off" }, ++ { "pcre2_match_limit", Parameter::PT_INT, "0:max32", "1500", ++ "limit pcre2 backtracking, 0 = off" }, + +- { "pcre_match_limit_recursion", Parameter::PT_INT, "0:max32", "1500", +- "limit pcre stack consumption, 0 = off" }, ++ { "pcre2_match_limit_recursion", Parameter::PT_INT, "0:max32", "1500", ++ "limit pcre2 stack consumption, 0 = off" }, + +- { "pcre_override", Parameter::PT_BOOL, nullptr, "true", +- "enable pcre match limit overrides when pattern matching (ie ignore /O)" }, ++ { "pcre2_override", Parameter::PT_BOOL, nullptr, "true", ++ "enable pcre2 match limit overrides when pattern matching (ie ignore /O)" }, + + #ifdef HAVE_HYPERSCAN +- { "pcre_to_regex", Parameter::PT_BOOL, nullptr, "false", +- "enable the use of regex instead of pcre for compatible expressions" }, ++ { "pcre2_to_regex", Parameter::PT_BOOL, nullptr, "false", ++ "enable the use of regex instead of pcre2 for compatible expressions" }, + #endif + + { "enable_address_anomaly_checks", Parameter::PT_BOOL, nullptr, "false", +@@ -221,13 +221,13 @@ bool DetectionModule::set(const char*, V + else if ( v.is("offload_threads") ) + sc->offload_threads = v.get_uint32(); + +- else if ( v.is("pcre_enable") ) +- v.update_mask(sc->run_flags, RUN_FLAG__NO_PCRE, true); ++ else if ( v.is("pcre2_enable") ) ++ v.update_mask(sc->run_flags, RUN_FLAG__NO_PCRE2, true); + +- else if ( v.is("pcre_match_limit") ) +- sc->pcre_match_limit = v.get_uint32(); ++ else if ( v.is("pcre2_match_limit") ) ++ sc->pcre2_match_limit = v.get_uint32(); + +- else if ( v.is("pcre_match_limit_recursion") ) ++ else if ( v.is("pcre2_match_limit_recursion") ) + { + // Cap the pcre recursion limit to not exceed the stack size. 
+ // +@@ -252,21 +252,21 @@ bool DetectionModule::set(const char*, V + if (max_rec < 0) + max_rec = 0; + +- sc->pcre_match_limit_recursion = v.get_uint32(); +- if (sc->pcre_match_limit_recursion > max_rec) ++ sc->pcre2_match_limit_recursion = v.get_uint32(); ++ if (sc->pcre2_match_limit_recursion > max_rec) + { +- sc->pcre_match_limit_recursion = max_rec; +- LogMessage("Capping pcre_match_limit_recursion to %ld, thread stack_size %ld.\n", +- sc->pcre_match_limit_recursion, thread_stack_size); ++ sc->pcre2_match_limit_recursion = max_rec; ++ LogMessage("Capping pcre2_match_limit_recursion to %ld, thread stack_size %llu.\n", ++ sc->pcre2_match_limit_recursion, thread_stack_size); + } + } + +- else if ( v.is("pcre_override") ) +- sc->pcre_override = v.get_bool(); ++ else if ( v.is("pcre2_override") ) ++ sc->pcre2_override = v.get_bool(); + + #ifdef HAVE_HYPERSCAN +- else if ( v.is("pcre_to_regex") ) +- sc->pcre_to_regex = v.get_bool(); ++ else if ( v.is("pcre2_to_regex") ) ++ sc->pcre2_to_regex = v.get_bool(); + #endif + + else if ( v.is("enable_address_anomaly_checks") ) +--- a/src/detection/detection_options.cc ++++ b/src/detection/detection_options.cc +@@ -595,7 +595,7 @@ int detection_option_node_evaluate( + { + if ( !child_node->is_relative ) + { +- // If it's a non-relative content or pcre, no reason ++ // If it's a non-relative content or pcre2, no reason + // to check again. Only increment result once. + // Should hit this condition on first loop iteration. + if ( loop_count == 1 ) +@@ -661,10 +661,10 @@ int detection_option_node_evaluate( + } + + // If all children branches matched, we don't need to reeval any of +- // the children so don't need to reeval this content/pcre rule ++ // the children so don't need to reeval this content/pcre2 rule + // option at a new offset. + // Else, reset the DOE ptr to last eval for offset/depth, +- // distance/within adjustments for this same content/pcre rule option. ++ // distance/within adjustments for this same content/pcre2 rule option. + // If the node and its sub-tree propagate MATCH back, + // then all its continuations are recalled. 
+ if ( result == node->num_children ) +--- a/src/ips_options/ips_options.cc ++++ b/src/ips_options/ips_options.cc +@@ -72,7 +72,7 @@ extern const BaseApi* ips_ip_proto[]; + extern const BaseApi* ips_isdataat[]; + extern const BaseApi* ips_itype[]; + extern const BaseApi* ips_msg[]; +-extern const BaseApi* ips_pcre[]; ++extern const BaseApi* ips_pcre2[]; + extern const BaseApi* ips_priority[]; + extern const BaseApi* ips_raw_data[]; + extern const BaseApi* ips_rem[]; +@@ -146,7 +146,7 @@ void load_ips_options() + PluginManager::load_plugins(ips_isdataat); + PluginManager::load_plugins(ips_itype); + PluginManager::load_plugins(ips_msg); +- PluginManager::load_plugins(ips_pcre); ++ PluginManager::load_plugins(ips_pcre2); + PluginManager::load_plugins(ips_priority); + PluginManager::load_plugins(ips_raw_data); + PluginManager::load_plugins(ips_rem); +--- a/src/ips_options/ips_pcre.cc ++++ b/src/ips_options/ips_pcre.cc +@@ -23,7 +23,8 @@ + #include "config.h" + #endif + +-#include ++#define PCRE2_CODE_UNIT_WIDTH 8 ++#include + + #include + +@@ -43,33 +44,31 @@ + + using namespace snort; + +-#ifndef PCRE_STUDY_JIT_COMPILE +-#define PCRE_STUDY_JIT_COMPILE 0 ++#ifndef PCRE2_STUDY_JIT_COMPILE ++#define PCRE2_STUDY_JIT_COMPILE 0 + #endif + + //#define NO_JIT // uncomment to disable JIT for Xcode + + #ifdef NO_JIT +-#define PCRE_STUDY_FLAGS 0 +-#define pcre_release(x) pcre_free(x) ++#define PCRE2_JIT 0 + #else +-#define PCRE_STUDY_FLAGS PCRE_STUDY_JIT_COMPILE +-#define pcre_release(x) pcre_free_study(x) ++#define PCRE2_JIT PCRE2_STUDY_JIT_COMPILE + #endif ++#define pcre2_release(x) pcre2_code_free(x) + + #define SNORT_PCRE_RELATIVE 0x00010 // relative to the end of the last match + #define SNORT_PCRE_INVERT 0x00020 // invert detect + #define SNORT_PCRE_ANCHORED 0x00040 + #define SNORT_OVERRIDE_MATCH_LIMIT 0x00080 // Override default limits on match & match recursion + +-#define s_name "pcre" ++#define s_name "pcre2" + #define mod_regex_name "regex" + +-struct PcreData ++struct Pcre2Data + { +- pcre* re; /* compiled regex */ +- pcre_extra* pe; /* studied regex foo */ +- bool free_pe; ++ pcre2_code* re; /* compiled regex */ ++ pcre2_match_context* match_context; /* match_context for limits */ + int options; /* sp_pcre specific options (relative & inverse) */ + char* expression; + }; +@@ -83,36 +82,32 @@ struct PcreData + // by verify; search uses the value in snort conf + static int s_ovector_max = -1; + +-static unsigned scratch_index; +-static ScratchAllocator* scratcher = nullptr; +- +-static THREAD_LOCAL ProfileStats pcrePerfStats; ++static THREAD_LOCAL ProfileStats pcre2PerfStats; + + //------------------------------------------------------------------------- + // implementation foo + //------------------------------------------------------------------------- + +-static void pcre_capture( +- const void* code, const void* extra) ++static void pcre2_capture(const void* code) + { + int tmp_ovector_size = 0; + +- pcre_fullinfo((const pcre*)code, (const pcre_extra*)extra, +- PCRE_INFO_CAPTURECOUNT, &tmp_ovector_size); ++ pcre2_pattern_info((const pcre2_code *)code, ++ PCRE2_INFO_CAPTURECOUNT, &tmp_ovector_size); + + if (tmp_ovector_size > s_ovector_max) + s_ovector_max = tmp_ovector_size; + } + +-static void pcre_check_anchored(PcreData* pcre_data) ++static void pcre2_check_anchored(Pcre2Data* pcre2_data) + { + int rc; + unsigned long int options = 0; + +- if ((pcre_data == nullptr) || (pcre_data->re == nullptr) || (pcre_data->pe == nullptr)) ++ if ((pcre2_data == nullptr) || (pcre2_data->re == 
nullptr)) + return; + +- rc = pcre_fullinfo(pcre_data->re, pcre_data->pe, PCRE_INFO_OPTIONS, (void*)&options); ++ rc = pcre2_pattern_info(pcre2_data->re, PCRE2_INFO_ARGOPTIONS, (void*)&options); + switch (rc) + { + /* pcre_fullinfo fails for the following: +@@ -127,40 +122,41 @@ static void pcre_check_anchored(PcreData + /* This is the success code */ + break; + +- case PCRE_ERROR_NULL: +- ParseError("pcre_fullinfo: code and/or where were null."); ++ case PCRE2_ERROR_NULL: ++ ParseError("pcre2_fullinfo: code and/or where were null."); + return; + +- case PCRE_ERROR_BADMAGIC: +- ParseError("pcre_fullinfo: compiled code didn't have correct magic."); ++ case PCRE2_ERROR_BADMAGIC: ++ ParseError("pcre2_fullinfo: compiled code didn't have correct magic."); + return; + +- case PCRE_ERROR_BADOPTION: +- ParseError("pcre_fullinfo: option type is invalid."); ++ case PCRE2_ERROR_BADOPTION: ++ ParseError("pcre2_fullinfo: option type is invalid."); + return; + + default: +- ParseError("pcre_fullinfo: Unknown error code."); ++ ParseError("pcre2_fullinfo: Unknown error code."); + return; + } + +- if ((options & PCRE_ANCHORED) && !(options & PCRE_MULTILINE)) ++ if ((options & PCRE2_ANCHORED) && !(options & PCRE2_MULTILINE)) + { + /* This means that this pcre rule option shouldn't be EvalStatus + * even if any of it's relative children should fail to match. + * It is anchored to the cursor set by the previous cursor setting + * rule option */ +- pcre_data->options |= SNORT_PCRE_ANCHORED; ++ pcre2_data->options |= SNORT_PCRE_ANCHORED; + } + } + +-static void pcre_parse(const SnortConfig* sc, const char* data, PcreData* pcre_data) ++static void pcre2_parse(const SnortConfig* sc, const char* data, Pcre2Data* pcre2_data) + { +- const char* error; ++ PCRE2_UCHAR error[128]; + char* re, * free_me; + char* opts; + char delimit = '/'; +- int erroffset; ++ int errorcode; ++ PCRE2_SIZE erroffset; + int compile_flags = 0; + + if (data == nullptr) +@@ -180,7 +176,7 @@ static void pcre_parse(const SnortConfig + + if (*re == '!') + { +- pcre_data->options |= SNORT_PCRE_INVERT; ++ pcre2_data->options |= SNORT_PCRE_INVERT; + re++; + while (isspace((int)*re)) + re++; +@@ -212,7 +208,7 @@ static void pcre_parse(const SnortConfig + else if (*re != delimit) + goto syntax; + +- pcre_data->expression = snort_strdup(re); ++ pcre2_data->expression = snort_strdup(re); + + /* find ending delimiter, trim delimit chars */ + opts = strrchr(re, delimit); +@@ -230,25 +226,25 @@ static void pcre_parse(const SnortConfig + { + switch (*opts) + { +- case 'i': compile_flags |= PCRE_CASELESS; break; +- case 's': compile_flags |= PCRE_DOTALL; break; +- case 'm': compile_flags |= PCRE_MULTILINE; break; +- case 'x': compile_flags |= PCRE_EXTENDED; break; ++ case 'i': compile_flags |= PCRE2_CASELESS; break; ++ case 's': compile_flags |= PCRE2_DOTALL; break; ++ case 'm': compile_flags |= PCRE2_MULTILINE; break; ++ case 'x': compile_flags |= PCRE2_EXTENDED; break; + + /* + * these are pcre specific... 
don't work with perl + */ +- case 'A': compile_flags |= PCRE_ANCHORED; break; +- case 'E': compile_flags |= PCRE_DOLLAR_ENDONLY; break; +- case 'G': compile_flags |= PCRE_UNGREEDY; break; ++ case 'A': compile_flags |= PCRE2_ANCHORED; break; ++ case 'E': compile_flags |= PCRE2_DOLLAR_ENDONLY; break; ++ case 'G': compile_flags |= PCRE2_UNGREEDY; break; + + /* +- * these are snort specific don't work with pcre or perl ++ * these are snort specific don't work with pcre2 or perl + */ +- case 'R': pcre_data->options |= SNORT_PCRE_RELATIVE; break; ++ case 'R': pcre2_data->options |= SNORT_PCRE_RELATIVE; break; + case 'O': +- if ( sc->pcre_override ) +- pcre_data->options |= SNORT_OVERRIDE_MATCH_LIMIT; ++ if ( sc->pcre2_override ) ++ pcre2_data->options |= SNORT_OVERRIDE_MATCH_LIMIT; + break; + + default: +@@ -259,71 +255,68 @@ static void pcre_parse(const SnortConfig + } + + /* now compile the re */ +- pcre_data->re = pcre_compile(re, compile_flags, &error, &erroffset, nullptr); ++ pcre2_data->re = pcre2_compile((PCRE2_SPTR)re, PCRE2_ZERO_TERMINATED, compile_flags, &errorcode, &erroffset, nullptr); ++ ++ if (pcre2_data->re == nullptr) ++ { ++ pcre2_get_error_message(errorcode, error, 128); ++ ParseError(": pcre2 compile of '%s' failed at offset " ++ "%zu : %s", re, erroffset, error); ++ return; ++ } + +- if (pcre_data->re == nullptr) ++ /* now create match context */ ++ pcre2_data->match_context = pcre2_match_context_create(NULL); ++ if(pcre2_data->match_context == NULL) + { +- ParseError(": pcre compile of '%s' failed at offset " +- "%d : %s", re, erroffset, error); ++ ParseError(": failed to allocate memory for match context"); + return; + } + + /* now study it... */ +- pcre_data->pe = pcre_study(pcre_data->re, PCRE_STUDY_FLAGS, &error); ++ if (PCRE2_JIT) ++ errorcode = pcre2_jit_compile(pcre2_data->re, PCRE2_JIT_COMPLETE); + +- if (pcre_data->pe) ++ if (PCRE2_JIT || errorcode) + { +- if ((sc->get_pcre_match_limit() != 0) && +- !(pcre_data->options & SNORT_OVERRIDE_MATCH_LIMIT)) ++ if ((sc->get_pcre2_match_limit() != 0) && ++ !(pcre2_data->options & SNORT_OVERRIDE_MATCH_LIMIT)) + { +- if ( !(pcre_data->pe->flags & PCRE_EXTRA_MATCH_LIMIT) ) +- pcre_data->pe->flags |= PCRE_EXTRA_MATCH_LIMIT; +- +- pcre_data->pe->match_limit = sc->get_pcre_match_limit(); ++ pcre2_set_match_limit(pcre2_data->match_context, sc->get_pcre2_match_limit()); + } + +- if ((sc->get_pcre_match_limit_recursion() != 0) && +- !(pcre_data->options & SNORT_OVERRIDE_MATCH_LIMIT)) ++ if ((sc->get_pcre2_match_limit_recursion() != 0) && ++ !(pcre2_data->options & SNORT_OVERRIDE_MATCH_LIMIT)) + { +- if ( !(pcre_data->pe->flags & PCRE_EXTRA_MATCH_LIMIT_RECURSION) ) +- pcre_data->pe->flags |= PCRE_EXTRA_MATCH_LIMIT_RECURSION; +- +- pcre_data->pe->match_limit_recursion = +- sc->get_pcre_match_limit_recursion(); ++ pcre2_set_match_limit(pcre2_data->match_context, sc->get_pcre2_match_limit_recursion()); + } + } + else + { +- if (!(pcre_data->options & SNORT_OVERRIDE_MATCH_LIMIT) && +- ((sc->get_pcre_match_limit() != 0) || +- (sc->get_pcre_match_limit_recursion() != 0))) ++ if (!(pcre2_data->options & SNORT_OVERRIDE_MATCH_LIMIT) && ++ ((sc->get_pcre2_match_limit() != 0) || ++ (sc->get_pcre2_match_limit_recursion() != 0))) + { +- pcre_data->pe = (pcre_extra*)snort_calloc(sizeof(pcre_extra)); +- pcre_data->free_pe = true; +- +- if (sc->get_pcre_match_limit() != 0) ++ if (sc->get_pcre2_match_limit() != 0) + { +- pcre_data->pe->flags |= PCRE_EXTRA_MATCH_LIMIT; +- pcre_data->pe->match_limit = sc->get_pcre_match_limit(); ++ 
pcre2_set_match_limit(pcre2_data->match_context, sc->get_pcre2_match_limit()); + } + +- if (sc->get_pcre_match_limit_recursion() != 0) ++ if (sc->get_pcre2_match_limit_recursion() != 0) + { +- pcre_data->pe->flags |= PCRE_EXTRA_MATCH_LIMIT_RECURSION; +- pcre_data->pe->match_limit_recursion = +- sc->get_pcre_match_limit_recursion(); ++ pcre2_set_match_limit(pcre2_data->match_context, sc->get_pcre2_match_limit_recursion()); + } + } + } + +- if (error != nullptr) ++ if (PCRE2_JIT && errorcode) + { +- ParseError("pcre study failed : %s", error); ++ ParseError("pcre2 JIT failed : %s", error); + return; + } + +- pcre_capture(pcre_data->re, pcre_data->pe); +- pcre_check_anchored(pcre_data); ++ pcre2_capture(pcre2_data->re); ++ pcre2_check_anchored(pcre2_data); + + snort_free(free_me); + return; +@@ -332,40 +325,44 @@ syntax: + snort_free(free_me); + + // ensure integrity from parse error to fatal error +- if ( !pcre_data->expression ) +- pcre_data->expression = snort_strdup(""); ++ if ( !pcre2_data->expression ) ++ pcre2_data->expression = snort_strdup(""); + +- ParseError("unable to parse pcre %s", data); ++ ParseError("unable to parse pcre2 %s", data); + } + + /* +- * Perform a search of the PCRE data. ++ * Perform a search of the PCRE2 data. + * found_offset will be set to -1 when the find is unsuccessful OR the routine is inverted + */ +-static bool pcre_search( ++static bool pcre2_search( + Packet* p, +- const PcreData* pcre_data, ++ const Pcre2Data* pcre2_data, + const uint8_t* buf, + unsigned len, + unsigned start_offset, + int& found_offset) + { ++ pcre2_match_data *match_data; ++ PCRE2_SIZE *ovector; + bool matched; + + found_offset = -1; + +- std::vector ss = p->context->conf->state[get_instance_id()]; +- assert(ss[scratch_index]); ++ match_data = pcre2_match_data_create(p->context->conf->pcre2_ovector_size, NULL); ++ if (match_data == nullptr) { ++ pc.pcre2_error++; ++ return false; ++ } + +- int result = pcre_exec( +- pcre_data->re, /* result of pcre_compile() */ +- pcre_data->pe, /* result of pcre_study() */ +- (const char*)buf, /* the subject string */ +- len, /* the length of the subject string */ +- start_offset, /* start at offset 0 in the subject */ +- 0, /* options(handled at compile time */ +- (int*)ss[scratch_index], /* vector for substring information */ +- p->context->conf->pcre_ovector_size); /* number of elements in the vector */ ++ int result = pcre2_match( ++ pcre2_data->re, /* result of pcre_compile() */ ++ (PCRE2_SPTR)buf, /* the subject string */ ++ (PCRE2_SIZE)len, /* the length of the subject string */ ++ (PCRE2_SIZE)start_offset, /* start at offset 0 in the subject */ ++ 0, /* options(handled at compile time */ ++ match_data, /* match data to store the match results */ ++ pcre2_data->match_context); /* match context for limits */ + + if (result >= 0) + { +@@ -390,34 +387,37 @@ static bool pcre_search( + * and a single int for scratch space. 
+ */ + +- found_offset = ((int*)ss[scratch_index])[1]; ++ ovector = pcre2_get_ovector_pointer(match_data); ++ found_offset = ovector[1]; + } +- else if (result == PCRE_ERROR_NOMATCH) ++ else if (result == PCRE2_ERROR_NOMATCH) + { + matched = false; + } +- else if (result == PCRE_ERROR_MATCHLIMIT) ++ else if (result == PCRE2_ERROR_MATCHLIMIT) + { +- pc.pcre_match_limit++; ++ pc.pcre2_match_limit++; + matched = false; + } +- else if (result == PCRE_ERROR_RECURSIONLIMIT) ++ else if (result == PCRE2_ERROR_RECURSIONLIMIT) + { +- pc.pcre_recursion_limit++; ++ pc.pcre2_recursion_limit++; + matched = false; + } + else + { +- pc.pcre_error++; ++ pc.pcre2_error++; + return false; + } + + /* invert sense of match */ +- if (pcre_data->options & SNORT_PCRE_INVERT) ++ if (pcre2_data->options & SNORT_PCRE_INVERT) + { + matched = !matched; + } + ++ pcre2_match_data_free(match_data); ++ + return matched; + } + +@@ -425,14 +425,14 @@ static bool pcre_search( + // class methods + //------------------------------------------------------------------------- + +-class PcreOption : public IpsOption ++class Pcre2Option : public IpsOption + { + public: +- PcreOption(PcreData* c) : ++ Pcre2Option(Pcre2Data* c) : + IpsOption(s_name, RULE_OPTION_TYPE_CONTENT) + { config = c; } + +- ~PcreOption() override; ++ ~Pcre2Option() override; + + uint32_t hash() const override; + bool operator==(const IpsOption&) const override; +@@ -446,17 +446,17 @@ public: + EvalStatus eval(Cursor&, Packet*) override; + bool retry(Cursor&, const Cursor&) override; + +- PcreData* get_data() ++ Pcre2Data* get_data() + { return config; } + +- void set_data(PcreData* pcre) ++ void set_data(Pcre2Data* pcre) + { config = pcre; } + + private: +- PcreData* config; ++ Pcre2Data* config; + }; + +-PcreOption::~PcreOption() ++Pcre2Option::~Pcre2Option() + { + if ( !config ) + return; +@@ -464,21 +464,16 @@ PcreOption::~PcreOption() + if ( config->expression ) + snort_free(config->expression); + +- if ( config->pe ) +- { +- if ( config->free_pe ) +- snort_free(config->pe); +- else +- pcre_release(config->pe); +- } ++ if ( config->match_context ) ++ pcre2_match_context_free(config->match_context); + + if ( config->re ) +- free(config->re); // external allocation ++ pcre2_code_free(config->re); // external allocation + + snort_free(config); + } + +-uint32_t PcreOption::hash() const ++uint32_t Pcre2Option::hash() const + { + uint32_t a = 0, b = 0, c = 0; + int expression_len = strlen(config->expression); +@@ -532,14 +527,14 @@ uint32_t PcreOption::hash() const + return c; + } + +-bool PcreOption::operator==(const IpsOption& ips) const ++bool Pcre2Option::operator==(const IpsOption& ips) const + { + if ( !IpsOption::operator==(ips) ) + return false; + +- const PcreOption& rhs = (const PcreOption&)ips; +- PcreData* left = config; +- PcreData* right = rhs.config; ++ const Pcre2Option& rhs = (const Pcre2Option&)ips; ++ Pcre2Data* left = config; ++ Pcre2Data* right = rhs.config; + + if (( strcmp(left->expression, right->expression) == 0) && + ( left->options == right->options)) +@@ -550,13 +545,13 @@ bool PcreOption::operator==(const IpsOpt + return false; + } + +-IpsOption::EvalStatus PcreOption::eval(Cursor& c, Packet* p) ++IpsOption::EvalStatus Pcre2Option::eval(Cursor& c, Packet* p) + { + // cppcheck-suppress unreadVariable +- RuleProfile profile(pcrePerfStats); ++ RuleProfile profile(pcre2PerfStats); + +- // short circuit this for testing pcre performance impact +- if ( p->context->conf->no_pcre() ) ++ // short circuit this for testing pcre2 performance 
impact ++ if ( p->context->conf->no_pcre2() ) + return NO_MATCH; + + unsigned pos = c.get_delta(); +@@ -570,7 +565,7 @@ IpsOption::EvalStatus PcreOption::eval(C + + int found_offset = -1; // where is the ending location of the pattern + +- if ( pcre_search(p, config, c.buffer()+adj, c.size()-adj, pos, found_offset) ) ++ if ( pcre2_search(p, config, c.buffer()+adj, c.size()-adj, pos, found_offset) ) + { + if ( found_offset > 0 ) + { +@@ -585,17 +580,17 @@ IpsOption::EvalStatus PcreOption::eval(C + } + + // we always advance by found_offset so no adjustments to cursor are done +-// here; note also that this means relative pcre matches on overlapping ++// here; note also that this means relative pcre2 matches on overlapping + // patterns won't work. given the test pattern "ABABACD": + // + // ( sid:1; content:"ABA"; content:"C"; within:1; ) +-// ( sid:2; pcre:"/ABA/"; content:"C"; within:1; ) ++// ( sid:2; pcre2:"/ABA/"; content:"C"; within:1; ) + // + // sid 1 will fire but sid 2 will NOT. this example is easily fixed by +-// using content, but more advanced pcre won't work for the relative / ++// using content, but more advanced pcre2 won't work for the relative / + // overlap case. + +-bool PcreOption::retry(Cursor&, const Cursor&) ++bool Pcre2Option::retry(Cursor&, const Cursor&) + { + if ((config->options & (SNORT_PCRE_INVERT | SNORT_PCRE_ANCHORED))) + { +@@ -616,46 +611,43 @@ static const Parameter s_params[] = + { nullptr, Parameter::PT_MAX, nullptr, nullptr, nullptr } + }; + +-struct PcreStats ++struct Pcre2Stats + { +- PegCount pcre_rules; ++ PegCount pcre2_rules; + #ifdef HAVE_HYPERSCAN +- PegCount pcre_to_hyper; ++ PegCount pcre2_to_hyper; + #endif +- PegCount pcre_native; +- PegCount pcre_negated; ++ PegCount pcre2_native; ++ PegCount pcre2_negated; + }; + + const PegInfo pcre_pegs[] = + { +- { CountType::SUM, "pcre_rules", "total rules processed with pcre option" }, ++ { CountType::SUM, "pcre2_rules", "total rules processed with pcre2 option" }, + #ifdef HAVE_HYPERSCAN +- { CountType::SUM, "pcre_to_hyper", "total pcre rules by hyperscan engine" }, ++ { CountType::SUM, "pcre2_to_hyper", "total pcre2 rules by hyperscan engine" }, + #endif +- { CountType::SUM, "pcre_native", "total pcre rules compiled by pcre engine" }, +- { CountType::SUM, "pcre_negated", "total pcre rules using negation syntax" }, ++ { CountType::SUM, "pcre2_native", "total pcre2 rules compiled by pcre engine" }, ++ { CountType::SUM, "pcre2_negated", "total pcre2 rules using negation syntax" }, + { CountType::END, nullptr, nullptr } + }; + +-PcreStats pcre_stats; ++Pcre2Stats pcre2_stats; + + #define s_help \ +- "rule option for matching payload data with pcre" ++ "rule option for matching payload data with pcre2" + +-class PcreModule : public Module ++class Pcre2Module : public Module + { + public: +- PcreModule() : Module(s_name, s_help, s_params) ++ Pcre2Module() : Module(s_name, s_help, s_params) + { + data = nullptr; +- scratcher = new SimpleScratchAllocator(scratch_setup, scratch_cleanup); +- scratch_index = scratcher->get_id(); + } + +- ~PcreModule() override ++ ~Pcre2Module() override + { + delete data; +- delete scratcher; + } + + #ifdef HAVE_HYPERSCAN +@@ -665,12 +657,12 @@ public: + bool end(const char*, int, SnortConfig*) override; + + ProfileStats* get_profile() const override +- { return &pcrePerfStats; } ++ { return &pcre2PerfStats; } + + const PegInfo* get_pegs() const override; + PegCount* get_counts() const override; + +- PcreData* get_data(); ++ Pcre2Data* get_data(); + + bool global_stats() 
const override + { return true; } +@@ -682,31 +674,28 @@ public: + { return mod_regex; } + + private: +- PcreData* data; ++ Pcre2Data* data; + Module* mod_regex = nullptr; + std::string re; +- +- static bool scratch_setup(SnortConfig*); +- static void scratch_cleanup(SnortConfig*); + }; + +-PcreData* PcreModule::get_data() ++Pcre2Data* Pcre2Module::get_data() + { +- PcreData* tmp = data; ++ Pcre2Data* tmp = data; + data = nullptr; + return tmp; + } + +-const PegInfo* PcreModule::get_pegs() const ++const PegInfo* Pcre2Module::get_pegs() const + { return pcre_pegs; } + +-PegCount* PcreModule::get_counts() const +-{ return (PegCount*)&pcre_stats; } ++PegCount* Pcre2Module::get_counts() const ++{ return (PegCount*)&pcre2_stats; } + + #ifdef HAVE_HYPERSCAN +-bool PcreModule::begin(const char* name, int v, SnortConfig* sc) ++bool Pcre2Module::begin(const char* name, int v, SnortConfig* sc) + { +- if ( sc->pcre_to_regex ) ++ if ( sc->pcre2_to_regex ) + { + if ( !mod_regex ) + mod_regex = ModuleManager::get_module(mod_regex_name); +@@ -718,7 +707,7 @@ bool PcreModule::begin(const char* name, + } + #endif + +-bool PcreModule::set(const char* name, Value& v, SnortConfig* sc) ++bool Pcre2Module::set(const char* name, Value& v, SnortConfig* sc) + { + assert(v.is("~re")); + re = v.get_string(); +@@ -729,50 +718,28 @@ bool PcreModule::set(const char* name, V + return true; + } + +-bool PcreModule::end(const char* name, int v, SnortConfig* sc) ++bool Pcre2Module::end(const char* name, int v, SnortConfig* sc) + { + if( mod_regex ) + mod_regex = mod_regex->end(name, v, sc) ? mod_regex : nullptr; + + if ( !mod_regex ) + { +- data = (PcreData*)snort_calloc(sizeof(*data)); +- pcre_parse(sc, re.c_str(), data); ++ data = (Pcre2Data*)snort_calloc(sizeof(*data)); ++ pcre2_parse(sc, re.c_str(), data); + } + +- return true; +-} +- +-bool PcreModule::scratch_setup(SnortConfig* sc) +-{ +- if ( s_ovector_max < 0 ) +- return false; +- + // The pcre_fullinfo() function can be used to find out how many + // capturing subpatterns there are in a compiled pattern. The + // smallest size for ovector that will allow for n captured + // substrings, in addition to the offsets of the substring matched + // by the whole pattern is 3(n+1). 
+- +- sc->pcre_ovector_size = 3 * (s_ovector_max + 1); +- s_ovector_max = -1; +- +- for ( unsigned i = 0; i < sc->num_slots; ++i ) +- { +- std::vector& ss = sc->state[i]; +- ss[scratch_index] = snort_calloc(sc->pcre_ovector_size, sizeof(int)); ++ if ( s_ovector_max >= 0 ) { ++ sc->pcre2_ovector_size = 3 * (s_ovector_max + 1); ++ s_ovector_max = -1; + } +- return true; +-} + +-void PcreModule::scratch_cleanup(SnortConfig* sc) +-{ +- for ( unsigned i = 0; i < sc->num_slots; ++i ) +- { +- std::vector& ss = sc->state[i]; +- snort_free(ss[scratch_index]); +- ss[scratch_index] = nullptr; +- } ++ return true; + } + + //------------------------------------------------------------------------- +@@ -780,21 +747,21 @@ void PcreModule::scratch_cleanup(SnortCo + //------------------------------------------------------------------------- + + static Module* mod_ctor() +-{ return new PcreModule; } ++{ return new Pcre2Module; } + + static void mod_dtor(Module* m) + { delete m; } + +-static IpsOption* pcre_ctor(Module* p, OptTreeNode* otn) ++static IpsOption* pcre2_ctor(Module* p, OptTreeNode* otn) + { +- pcre_stats.pcre_rules++; +- PcreModule* m = (PcreModule*)p; ++ pcre2_stats.pcre2_rules++; ++ Pcre2Module* m = (Pcre2Module*)p; + + #ifdef HAVE_HYPERSCAN + Module* mod_regex = m->get_mod_regex(); + if ( mod_regex ) + { +- pcre_stats.pcre_to_hyper++; ++ pcre2_stats.pcre2_to_hyper++; + const IpsApi* opt_api = IpsManager::get_option_api(mod_regex_name); + return opt_api->ctor(mod_regex, otn); + } +@@ -803,16 +770,16 @@ static IpsOption* pcre_ctor(Module* p, O + UNUSED(otn); + #endif + { +- pcre_stats.pcre_native++; +- PcreData* d = m->get_data(); +- return new PcreOption(d); ++ pcre2_stats.pcre2_native++; ++ Pcre2Data* d = m->get_data(); ++ return new Pcre2Option(d); + } + } + +-static void pcre_dtor(IpsOption* p) ++static void pcre2_dtor(IpsOption* p) + { delete p; } + +-static const IpsApi pcre_api = ++static const IpsApi pcre2_api = + { + { + PT_IPS_OPTION, +@@ -832,17 +799,17 @@ static const IpsApi pcre_api = + nullptr, + nullptr, + nullptr, +- pcre_ctor, +- pcre_dtor, ++ pcre2_ctor, ++ pcre2_dtor, + nullptr + }; + + #ifdef BUILDING_SO + SO_PUBLIC const BaseApi* snort_plugins[] = + #else +-const BaseApi* ips_pcre[] = ++const BaseApi* ips_pcre2[] = + #endif + { +- &pcre_api.base, ++ &pcre2_api.base, + nullptr + }; +--- a/src/main/shell.cc ++++ b/src/main/shell.cc +@@ -29,7 +29,8 @@ + #include + #include + #include +-#include ++#define PCRE2_CODE_UNIT_WIDTH 8 ++#include + #include + #include + #include +@@ -138,13 +139,17 @@ static void install_version_strings(lua_ + + static void install_dependencies_strings(Shell* sh, lua_State* L) + { ++ + assert(dep_versions[0]); + ++ const char pcre2_version[32] = { 0 }; + std::vector vs; + const char* ljv = LUAJIT_VERSION; + const char* osv = OpenSSL_version(SSLEAY_VERSION); + const char* lpv = pcap_lib_version(); + ++ pcre2_config(PCRE2_CONFIG_VERSION, (PCRE2_UCHAR8 *)pcre2_version); ++ + while (*ljv and !isdigit(*ljv)) + ++ljv; + while (*osv and !isdigit(*osv)) +@@ -156,7 +161,7 @@ static void install_dependencies_strings + vs.push_back(ljv); + vs.push_back(osv); + vs.push_back(lpv); +- vs.push_back(pcre_version()); ++ vs.push_back(pcre2_version); + vs.push_back(zlib_version); + #ifdef HAVE_HYPERSCAN + vs.push_back(hs_version()); +--- a/src/main/snort_config.h ++++ b/src/main/snort_config.h +@@ -60,7 +60,7 @@ enum RunFlag + RUN_FLAG__PCAP_SHOW = 0x00001000, + RUN_FLAG__SHOW_FILE_CODES = 0x00002000, + RUN_FLAG__PAUSE = 0x00004000, +- RUN_FLAG__NO_PCRE = 0x00008000, ++ 
RUN_FLAG__NO_PCRE2 = 0x00008000, + + RUN_FLAG__DUMP_RULE_STATE = 0x00010000, + RUN_FLAG__DUMP_RULE_DEPS = 0x00020000, +@@ -214,13 +214,13 @@ public: + + //------------------------------------------------------ + // detection module stuff +- // FIXIT-L pcre_match_limit* are interdependent ++ // FIXIT-L pcre2_match_limit* are interdependent + // somehow a packet thread needs a much lower setting +- long int pcre_match_limit = 1500; +- long int pcre_match_limit_recursion = 1500; ++ long int pcre2_match_limit = 1500; ++ long int pcre2_match_limit_recursion = 1500; + +- int pcre_ovector_size = 0; +- bool pcre_override = true; ++ int pcre2_ovector_size = 0; ++ bool pcre2_override = true; + + uint32_t run_flags = 0; + +@@ -228,7 +228,7 @@ public: + unsigned offload_threads = 0; // disabled + + bool hyperscan_literals = false; +- bool pcre_to_regex = false; ++ bool pcre2_to_regex = false; + + bool global_rule_state = false; + bool global_default_rule_state = true; +@@ -600,8 +600,8 @@ public: + bool alert_before_pass() const + { return run_flags & RUN_FLAG__ALERT_BEFORE_PASS; } + +- bool no_pcre() const +- { return run_flags & RUN_FLAG__NO_PCRE; } ++ bool no_pcre2() const ++ { return run_flags & RUN_FLAG__NO_PCRE2; } + + bool conf_error_out() const + { return run_flags & RUN_FLAG__CONF_ERROR_OUT; } +@@ -616,11 +616,11 @@ public: + uint8_t new_ttl() const + { return get_network_policy()->new_ttl; } + +- long int get_pcre_match_limit() const +- { return pcre_match_limit; } ++ long int get_pcre2_match_limit() const ++ { return pcre2_match_limit; } + +- long int get_pcre_match_limit_recursion() const +- { return pcre_match_limit_recursion; } ++ long int get_pcre2_match_limit_recursion() const ++ { return pcre2_match_limit_recursion; } + + const ProfilerConfig* get_profiler() const + { return profiler; } +--- a/src/network_inspectors/appid/lua_detector_api.cc ++++ b/src/network_inspectors/appid/lua_detector_api.cc +@@ -25,7 +25,8 @@ + + #include "lua_detector_api.h" + #include +-#include ++#define PCRE2_CODE_UNIT_WIDTH 8 ++#include + #include + + #include "detection/fp_config.h" +@@ -714,7 +715,7 @@ static int detector_get_packet_direction + return 1; + } + +-/**Perform a pcre match with grouping. A simple regular expression match with no grouping ++/**Perform a pcre2 match with grouping. A simple regular expression match with no grouping + * can also be performed. + * + * @param Lua_State* - Lua state variable. +@@ -723,41 +724,50 @@ static int detector_get_packet_direction + * @return matchedStrings/stack - matched strings are pushed on stack starting with group 0. + * There may be 0 or more strings. + */ +-static int detector_get_pcre_groups(lua_State* L) ++static int detector_get_pcre2_groups(lua_State* L) + { + auto& ud = *UserData::check(L, DETECTOR, 1); + // Verify detector user data and that we are in packet context + LuaStateDescriptor* lsd = ud->validate_lua_state(true); + +- int ovector[OVECCOUNT]; +- const char* error; +- int erroffset; ++ PCRE2_SIZE* ovector; ++ pcre2_match_data* match_data; ++ PCRE2_UCHAR error[128]; ++ PCRE2_SIZE erroffset; ++ int errorcode; + + const char* pattern = lua_tostring(L, 2); + unsigned int offset = lua_tonumber(L, 3); /*offset can be zero, no check necessary. 
*/ + + /*compile the regular expression pattern, and handle errors */ +- pcre* re = pcre_compile(pattern, // the pattern +- PCRE_DOTALL, // default options - dot matches all inc \n +- &error, // for error message +- &erroffset, // for error offset +- nullptr); // use default character tables ++ pcre2_code* re = pcre2_compile((PCRE2_SPTR)pattern, // the pattern ++ PCRE2_ZERO_TERMINATED, // assume zero terminated strings ++ PCRE2_DOTALL, // default options - dot matches all inc \n ++ &errorcode, // for error message ++ &erroffset, // for error offset ++ nullptr); // use default character tables + + if (re == nullptr) + { +- appid_log(lsd->ldp.pkt, TRACE_ERROR_LEVEL, "PCRE compilation failed at offset %d: %s\n", erroffset, error); ++ pcre2_get_error_message(errorcode, error, 128); ++ appid_log(lsd->ldp.pkt, TRACE_ERROR_LEVEL, "PCRE2 compilation failed at offset %d: %s\n", erroffset, error); ++ return 0; ++ } ++ ++ match_data = pcre2_match_data_create(OVECCOUNT, NULL); ++ if (match_data == nullptr) { ++ appid_log(lsd->ldp.pkt, TRACE_ERROR_LEVEL, "PCRE2 failed to allocate mem for match_data\n"); + return 0; + } + + /*pattern match against the subject string. */ +- int rc = pcre_exec(re, // compiled pattern +- nullptr, // no extra data +- (const char*)lsd->ldp.data, // subject string +- lsd->ldp.size, // length of the subject +- offset, // offset 0 +- 0, // default options +- ovector, // output vector for substring information +- OVECCOUNT); // number of elements in the output vector ++ int rc = pcre2_match(re, // compiled pattern ++ (PCRE2_SPTR)lsd->ldp.data, // subject string ++ (PCRE2_SIZE)lsd->ldp.size, // length of the subject ++ (PCRE2_SIZE)offset, // offset 0 ++ 0, // default options ++ match_data, // match data for match results ++ NULL); // no match context + + if (rc >= 0) + { +@@ -771,10 +781,11 @@ static int detector_get_pcre_groups(lua_ + if (!lua_checkstack(L, rc)) + { + appid_log(lsd->ldp.pkt, TRACE_WARNING_LEVEL, "Cannot grow Lua stack by %d slots to hold " +- "PCRE matches\n", rc); ++ "PCRE2 matches\n", rc); + return 0; + } + ++ ovector = pcre2_get_ovector_pointer(match_data); + for (int i = 0; i < rc; i++) + { + lua_pushlstring(L, (const char*)lsd->ldp.data + ovector[2*i], ovector[2*i+1] - +@@ -784,12 +795,13 @@ static int detector_get_pcre_groups(lua_ + else + { + // log errors except no matches +- if (rc != PCRE_ERROR_NOMATCH) +- appid_log(lsd->ldp.pkt, TRACE_WARNING_LEVEL, "PCRE regular expression group match failed. rc: %d\n", rc); ++ if (rc != PCRE2_ERROR_NOMATCH) ++ appid_log(lsd->ldp.pkt, TRACE_WARNING_LEVEL, "PCRE2 regular expression group match failed. 
rc: %d\n", rc); + rc = 0; + } + +- pcre_free(re); ++ pcre2_match_data_free(match_data); ++ pcre2_code_free(re); + return rc; + } + +@@ -3229,7 +3241,7 @@ static const luaL_Reg detector_methods[] + { "getPacketSize", detector_get_packet_size }, + { "getPacketDir", detector_get_packet_direction }, + { "matchSimplePattern", detector_memcmp }, +- { "getPcreGroups", detector_get_pcre_groups }, ++ { "getPcreGroups", detector_get_pcre2_groups }, + { "getL4Protocol", detector_get_protocol_type }, + { "getPktSrcAddr", detector_get_packet_src_addr }, + { "getPktDstAddr", detector_get_packet_dst_addr }, +--- a/src/parser/parse_rule.cc ++++ b/src/parser/parse_rule.cc +@@ -911,10 +911,10 @@ void parse_rule_dir(SnortConfig*, const + ParseError("illegal direction specifier: %s", s); + } + +-// Values of the rule options "pcre", "regex" and "sd_pattern" are already escaped ++// Values of the rule options "pcre2", "regex" and "sd_pattern" are already escaped + // They are not unescaped during the rule parsing + static bool is_already_escaped(const std::string& opt_key) +-{ return opt_key == "pcre" or opt_key == "regex" or opt_key == "sd_pattern"; } ++{ return opt_key == "pcre2" or opt_key == "regex" or opt_key == "sd_pattern"; } + + static std::string escape(const std::string& s) + { +--- a/src/parser/parse_stream.cc ++++ b/src/parser/parse_stream.cc +@@ -603,7 +603,7 @@ static bool exec( + // that individual rule options can do whatever + static int get_escape(const string& s) + { +- if ( s == "pcre" ) ++ if ( s == "pcre2" ) + return 0; // no escape, option goes to ; + + else if ( s == "regex" || s == "sd_pattern" ) +--- a/src/search_engines/test/hyperscan_test.cc ++++ b/src/search_engines/test/hyperscan_test.cc +@@ -223,7 +223,7 @@ TEST(mpse_hs_match, regex) + CHECK(hits == 3); + } + +-TEST(mpse_hs_match, pcre) ++TEST(mpse_hs_match, pcre2) + { + Mpse::PatternDescriptor desc; + +--- a/src/utils/stats.cc ++++ b/src/utils/stats.cc +@@ -227,9 +227,9 @@ const PegInfo pc_names[] = + { CountType::SUM, "offload_fallback", "fast pattern offload search fallback attempts" }, + { CountType::SUM, "offload_failures", "fast pattern offload search failures" }, + { CountType::SUM, "offload_suspends", "fast pattern search suspends due to offload context chains" }, +- { CountType::SUM, "pcre_match_limit", "total number of times pcre hit the match limit" }, +- { CountType::SUM, "pcre_recursion_limit", "total number of times pcre hit the recursion limit" }, +- { CountType::SUM, "pcre_error", "total number of times pcre returns error" }, ++ { CountType::SUM, "pcre2_match_limit", "total number of times pcre2 hit the match limit" }, ++ { CountType::SUM, "pcre2_recursion_limit", "total number of times pcre2 hit the recursion limit" }, ++ { CountType::SUM, "pcre2_error", "total number of times pcre2 returns error" }, + { CountType::SUM, "cont_creations", "total number of continuations created" }, + { CountType::SUM, "cont_recalls", "total number of continuations recalled" }, + { CountType::SUM, "cont_flows", "total number of flows using continuation" }, +--- a/src/utils/stats.h ++++ b/src/utils/stats.h +@@ -60,9 +60,9 @@ struct PacketCount + PegCount offload_fallback; + PegCount offload_failures; + PegCount offload_suspends; +- PegCount pcre_match_limit; +- PegCount pcre_recursion_limit; +- PegCount pcre_error; ++ PegCount pcre2_match_limit; ++ PegCount pcre2_recursion_limit; ++ PegCount pcre2_error; + PegCount cont_creations; + PegCount cont_recalls; + PegCount cont_flows; +--- a/src/utils/util.cc ++++ b/src/utils/util.cc +@@ 
-30,7 +30,8 @@ + #include + #include + #include +-#include ++#define PCRE2_CODE_UNIT_WIDTH 8 ++#include + #include + #include + #include +@@ -105,10 +106,13 @@ void StoreSnortInfoStrings() + + int DisplayBanner() + { ++ PCRE2_UCHAR pcre2_version[32]; + const char* ljv = LUAJIT_VERSION; + while ( *ljv && !isdigit(*ljv) ) + ++ljv; + ++ pcre2_config(PCRE2_CONFIG_VERSION, pcre2_version); ++ + LogMessage("\n"); + LogMessage(" ,,_ -*> Snort++ <*-\n"); + #ifdef BUILD +@@ -125,7 +129,7 @@ int DisplayBanner() + LogMessage(" Using LuaJIT version %s\n", ljv); + LogMessage(" Using %s\n", OpenSSL_version(SSLEAY_VERSION)); + LogMessage(" Using %s\n", pcap_lib_version()); +- LogMessage(" Using PCRE version %s\n", pcre_version()); ++ LogMessage(" Using PCRE version %s\n", pcre2_version); + LogMessage(" Using ZLIB version %s\n", zlib_version); + #ifdef HAVE_HYPERSCAN + LogMessage(" Using Hyperscan version %s\n", hs_version()); +--- a/tools/snort2lua/config_states/config_api.cc ++++ b/tools/snort2lua/config_states/config_api.cc +@@ -105,13 +105,13 @@ extern const ConvertMap* min_ttl_map; + extern const ConvertMap* na_policy_mode_map; + extern const ConvertMap* new_ttl_map; + extern const ConvertMap* nolog_map; +-extern const ConvertMap* nopcre_map; ++extern const ConvertMap* nopcre2_map; + extern const ConvertMap* no_promisc_map; + extern const ConvertMap* obfuscate_map; + extern const ConvertMap* order_map; + extern const ConvertMap* paf_max_map; +-extern const ConvertMap* pcre_match_limit_map; +-extern const ConvertMap* pcre_match_limit_recursion_map; ++extern const ConvertMap* pcre2_match_limit_map; ++extern const ConvertMap* pcre2_match_limit_recursion_map; + extern const ConvertMap* pkt_count_map; + extern const ConvertMap* ppm_map; + extern const ConvertMap* policy_id_map; +@@ -224,13 +224,13 @@ const std::vector con + na_policy_mode_map, + new_ttl_map, + nolog_map, +- nopcre_map, ++ nopcre2_map, + no_promisc_map, + obfuscate_map, + order_map, + paf_max_map, +- pcre_match_limit_map, +- pcre_match_limit_recursion_map, ++ pcre2_match_limit_map, ++ pcre2_match_limit_recursion_map, + pkt_count_map, + ppm_map, + policy_id_map, +--- a/tools/snort2lua/config_states/config_no_option.cc ++++ b/tools/snort2lua/config_states/config_no_option.cc +@@ -250,18 +250,18 @@ static const ConvertMap enable_mpls_over + const ConvertMap* enable_mpls_overlapping_ip_map = &enable_mpls_overlapping_ip_api; + + /************************************************* +- ******************** nopcre ******************* ++ ******************** nopcre2 ******************* + *************************************************/ + +-static const std::string nopcre = "nopcre"; +-static const std::string pcre_enable = "pcre_enable"; +-static const ConvertMap nopcre_api = ++static const std::string nopcre2 = "nopcre2"; ++static const std::string pcre2_enable = "pcre2_enable"; ++static const ConvertMap nopcre2_api = + { +- nopcre, +- config_false_no_opt_ctor<& nopcre, & detection, & pcre_enable> ++ nopcre2, ++ config_false_no_opt_ctor<& nopcre2, & detection, & pcre2_enable> + }; + +-const ConvertMap* nopcre_map = &nopcre_api; ++const ConvertMap* nopcre2_map = &nopcre2_api; + + /************************************************* + ****************** obfuscate ****************** +--- a/tools/snort2lua/config_states/config_one_int_option.cc ++++ b/tools/snort2lua/config_states/config_one_int_option.cc +@@ -217,30 +217,30 @@ static const ConvertMap new_ttl_api = + const ConvertMap* new_ttl_map = &new_ttl_api; + + 
/************************************************* +- ************** pcre_match_limit ************** ++ ************** pcre2_match_limit ************** + *************************************************/ + +-static const std::string pcre_match_limit = "pcre_match_limit"; +-static const ConvertMap pcre_match_limit_api = ++static const std::string pcre2_match_limit = "pcre2_match_limit"; ++static const ConvertMap pcre2_match_limit_api = + { +- pcre_match_limit, +- config_int_ctor<& pcre_match_limit, & detection>, ++ pcre2_match_limit, ++ config_int_ctor<& pcre2_match_limit, & detection>, + }; + +-const ConvertMap* pcre_match_limit_map = &pcre_match_limit_api; ++const ConvertMap* pcre2_match_limit_map = &pcre2_match_limit_api; + + /************************************************** +- ********** pcre_match_limit_recursion ********** ++ ********** pcre2_match_limit_recursion ********** + **************************************************/ + +-static const std::string pcre_match_limit_recursion = "pcre_match_limit_recursion"; +-static const ConvertMap pcre_match_limit_recursion_api = ++static const std::string pcre2_match_limit_recursion = "pcre_match_limit_recursion"; ++static const ConvertMap pcre2_match_limit_recursion_api = + { +- pcre_match_limit_recursion, +- config_int_ctor<& pcre_match_limit_recursion, & detection>, ++ pcre2_match_limit_recursion, ++ config_int_ctor<& pcre2_match_limit_recursion, & detection>, + }; + +-const ConvertMap* pcre_match_limit_recursion_map = &pcre_match_limit_recursion_api; ++const ConvertMap* pcre2_match_limit_recursion_map = &pcre2_match_limit_recursion_api; + + /************************************************* + ****************** pkt_count ***************** +--- a/tools/snort2lua/rule_states/CMakeLists.txt ++++ b/tools/snort2lua/rule_states/CMakeLists.txt +@@ -12,7 +12,7 @@ add_library( rule_states OBJECT + rule_http_encode.cc + rule_isdataat.cc + rule_metadata.cc +- rule_pcre.cc ++ rule_pcre2.cc + rule_react.cc + rule_reference.cc + rule_replace.cc +--- a/tools/snort2lua/rule_states/rule_api.cc ++++ b/tools/snort2lua/rule_states/rule_api.cc +@@ -75,7 +75,7 @@ extern const ConvertMap* modbus_data_map + extern const ConvertMap* modbus_func_map; + extern const ConvertMap* modbus_unit_map; + extern const ConvertMap* msg_map; +-extern const ConvertMap* pcre_map; ++extern const ConvertMap* pcre2_map; + extern const ConvertMap* pkt_data_map; + extern const ConvertMap* priority_map; + extern const ConvertMap* protected_content_map; +@@ -159,7 +159,7 @@ const std::vector rul + modbus_func_map, + modbus_unit_map, + msg_map, +- pcre_map, ++ pcre2_map, + pkt_data_map, + priority_map, + protected_content_map, +--- a/tools/snort2lua/rule_states/rule_pcre.cc ++++ /dev/null +@@ -1,159 +0,0 @@ +-//-------------------------------------------------------------------------- +-// Copyright (C) 2014-2024 Cisco and/or its affiliates. All rights reserved. +-// +-// This program is free software; you can redistribute it and/or modify it +-// under the terms of the GNU General Public License Version 2 as published +-// by the Free Software Foundation. You may not use, modify or distribute +-// this program under any other version of the GNU General Public License. +-// +-// This program is distributed in the hope that it will be useful, but +-// WITHOUT ANY WARRANTY; without even the implied warranty of +-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +-// General Public License for more details. 
+-// +-// You should have received a copy of the GNU General Public License along +-// with this program; if not, write to the Free Software Foundation, Inc., +-// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +-//-------------------------------------------------------------------------- +-// rule_pcre.cc author Josh Rosenbaum +- +-#include +-#include +- +-#include "conversion_state.h" +-#include "helpers/converter.h" +-#include "helpers/s2l_util.h" +-#include "rule_api.h" +- +-namespace rules +-{ +-namespace +-{ +-class Pcre : public ConversionState +-{ +-public: +- Pcre(Converter& c) : ConversionState(c) { } +- bool convert(std::istringstream& data) override; +-}; +-} // namespace +- +-bool Pcre::convert(std::istringstream& data_stream) +-{ +- bool sticky_buffer_set = false; +- std::string buffer = "pkt_data"; +- +- char delim = '/'; +- std::string pcre_str = util::get_rule_option_args(data_stream); +- std::string pattern; +- std::string new_opts; +- std::string options; +- +- if (pcre_str.front() == '!') +- { +- pattern += "!"; +- pcre_str.erase(pcre_str.begin()); +- } +- +- if (pcre_str.front() != '"' || pcre_str.back() != '"') +- { +- rule_api.bad_rule(data_stream, "pattern must be enclosed in \""); +- return set_next_rule_state(data_stream); +- } +- +- pcre_str.erase(pcre_str.begin()); +- pattern += '"'; +- +- if (pcre_str.front() == 'm') +- { +- pcre_str.erase(pcre_str.begin()); +- pattern += 'm'; +- delim = pcre_str.front(); +- } +- +- const std::size_t pattern_end = pcre_str.rfind(delim); +- if ((pcre_str.front() != delim) || (pattern_end == 0)) +- { +- std::string tmp = "Regex must be enclosed in delim '"; +- tmp.append(delim, 1); +- rule_api.bad_rule(data_stream, tmp + "'"); +- return set_next_rule_state(data_stream); +- } +- +- pattern += pcre_str.substr(0, pattern_end + 1); +- options = pcre_str.substr(pattern_end + 1, std::string::npos); +- new_opts = ""; +- +- for (char c : options ) +- { +- std::string sticky_buffer = std::string(); // empty string +- +- switch (c) +- { +- case 'B': sticky_buffer = "raw_data"; break; +- case 'U': sticky_buffer = "http_uri"; break; +- case 'P': sticky_buffer = "pcre_P_option_body"; break; +- case 'H': sticky_buffer = "pcre_H_option_header"; break; +- case 'M': sticky_buffer = "http_method"; break; +- case 'C': sticky_buffer = "http_cookie"; break; +- case 'I': sticky_buffer = "http_raw_uri"; break; +- case 'D': sticky_buffer = "http_raw_header"; break; +- case 'K': sticky_buffer = "http_raw_cookie"; break; +- case 'S': sticky_buffer = "http_stat_code"; break; +- case 'Y': sticky_buffer = "http_stat_msg"; break; +- case 'i': +- case 's': +- case 'm': +- case 'x': +- case 'A': +- case 'E': +- case 'G': +- case 'O': +- case 'R': +- case '"': // end of reg_ex +- new_opts += c; +- break; +- default: +- { +- std::string dlt_opt = "unknown option - '"; +- dlt_opt.append(1, c); +- dlt_opt += "'"; +- rule_api.bad_rule(data_stream, dlt_opt); +- break; +- } +- } +- +- if (!sticky_buffer.empty()) +- { +- buffer = sticky_buffer; +- +- if (sticky_buffer_set) +- rule_api.bad_rule(data_stream, +- "Two sticky buffers set for this regular expression!"); +- else +- sticky_buffer_set = true; +- } +- } +- +- rule_api.add_option("pcre", pattern + new_opts); +- +- rule_api.set_curr_options_buffer(buffer); +- +- return set_next_rule_state(data_stream); +-} +- +-/************************** +- ******* A P I *********** +- **************************/ +- +-static ConversionState* ctor(Converter& c) +-{ return new Pcre(c); } +- +-static const ConvertMap 
pcre_api = +-{ +- "pcre", +- ctor, +-}; +- +-const ConvertMap* pcre_map = &pcre_api; +-} // namespace rules +- +--- /dev/null ++++ b/tools/snort2lua/rule_states/rule_pcre2.cc +@@ -0,0 +1,159 @@ ++//-------------------------------------------------------------------------- ++// Copyright (C) 2014-2024 Cisco and/or its affiliates. All rights reserved. ++// ++// This program is free software; you can redistribute it and/or modify it ++// under the terms of the GNU General Public License Version 2 as published ++// by the Free Software Foundation. You may not use, modify or distribute ++// this program under any other version of the GNU General Public License. ++// ++// This program is distributed in the hope that it will be useful, but ++// WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++// General Public License for more details. ++// ++// You should have received a copy of the GNU General Public License along ++// with this program; if not, write to the Free Software Foundation, Inc., ++// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. ++//-------------------------------------------------------------------------- ++// rule_pcre2.cc author Josh Rosenbaum ++ ++#include ++#include ++ ++#include "conversion_state.h" ++#include "helpers/converter.h" ++#include "helpers/s2l_util.h" ++#include "rule_api.h" ++ ++namespace rules ++{ ++namespace ++{ ++class Pcre2 : public ConversionState ++{ ++public: ++ Pcre2(Converter& c) : ConversionState(c) { } ++ bool convert(std::istringstream& data) override; ++}; ++} // namespace ++ ++bool Pcre2::convert(std::istringstream& data_stream) ++{ ++ bool sticky_buffer_set = false; ++ std::string buffer = "pkt_data"; ++ ++ char delim = '/'; ++ std::string pcre2_str = util::get_rule_option_args(data_stream); ++ std::string pattern; ++ std::string new_opts; ++ std::string options; ++ ++ if (pcre2_str.front() == '!') ++ { ++ pattern += "!"; ++ pcre2_str.erase(pcre2_str.begin()); ++ } ++ ++ if (pcre2_str.front() != '"' || pcre2_str.back() != '"') ++ { ++ rule_api.bad_rule(data_stream, "pattern must be enclosed in \""); ++ return set_next_rule_state(data_stream); ++ } ++ ++ pcre2_str.erase(pcre2_str.begin()); ++ pattern += '"'; ++ ++ if (pcre2_str.front() == 'm') ++ { ++ pcre2_str.erase(pcre2_str.begin()); ++ pattern += 'm'; ++ delim = pcre2_str.front(); ++ } ++ ++ const std::size_t pattern_end = pcre2_str.rfind(delim); ++ if ((pcre2_str.front() != delim) || (pattern_end == 0)) ++ { ++ std::string tmp = "Regex must be enclosed in delim '"; ++ tmp.append(delim, 1); ++ rule_api.bad_rule(data_stream, tmp + "'"); ++ return set_next_rule_state(data_stream); ++ } ++ ++ pattern += pcre2_str.substr(0, pattern_end + 1); ++ options = pcre2_str.substr(pattern_end + 1, std::string::npos); ++ new_opts = ""; ++ ++ for (char c : options ) ++ { ++ std::string sticky_buffer = std::string(); // empty string ++ ++ switch (c) ++ { ++ case 'B': sticky_buffer = "raw_data"; break; ++ case 'U': sticky_buffer = "http_uri"; break; ++ case 'P': sticky_buffer = "pcre_P_option_body"; break; ++ case 'H': sticky_buffer = "pcre_H_option_header"; break; ++ case 'M': sticky_buffer = "http_method"; break; ++ case 'C': sticky_buffer = "http_cookie"; break; ++ case 'I': sticky_buffer = "http_raw_uri"; break; ++ case 'D': sticky_buffer = "http_raw_header"; break; ++ case 'K': sticky_buffer = "http_raw_cookie"; break; ++ case 'S': sticky_buffer = "http_stat_code"; break; ++ case 'Y': sticky_buffer = 
"http_stat_msg"; break; ++ case 'i': ++ case 's': ++ case 'm': ++ case 'x': ++ case 'A': ++ case 'E': ++ case 'G': ++ case 'O': ++ case 'R': ++ case '"': // end of reg_ex ++ new_opts += c; ++ break; ++ default: ++ { ++ std::string dlt_opt = "unknown option - '"; ++ dlt_opt.append(1, c); ++ dlt_opt += "'"; ++ rule_api.bad_rule(data_stream, dlt_opt); ++ break; ++ } ++ } ++ ++ if (!sticky_buffer.empty()) ++ { ++ buffer = sticky_buffer; ++ ++ if (sticky_buffer_set) ++ rule_api.bad_rule(data_stream, ++ "Two sticky buffers set for this regular expression!"); ++ else ++ sticky_buffer_set = true; ++ } ++ } ++ ++ rule_api.add_option("pcre", pattern + new_opts); ++ ++ rule_api.set_curr_options_buffer(buffer); ++ ++ return set_next_rule_state(data_stream); ++} ++ ++/************************** ++ ******* A P I *********** ++ **************************/ ++ ++static ConversionState* ctor(Converter& c) ++{ return new Pcre2(c); } ++ ++static const ConvertMap pcre2_api = ++{ ++ "pcre2", ++ ctor, ++}; ++ ++const ConvertMap* pcre2_map = &pcre2_api; ++} // namespace rules ++ +--- a/tools/snort2lua/rule_states/rule_sd_pattern.cc ++++ b/tools/snort2lua/rule_states/rule_sd_pattern.cc +@@ -41,7 +41,7 @@ private: + + std::string SDPattern::convert_pattern(const std::string& pattern) + { +- const std::string unused_pcre_tokens("()[].+*^$|"); ++ const std::string unused_pcre2_tokens("()[].+*^$|"); + + std::string s3_pattern; + +@@ -100,7 +100,7 @@ std::string SDPattern::convert_pattern(c + break; + + default: +- if (unused_pcre_tokens.find(sym) != std::string::npos) ++ if (unused_pcre2_tokens.find(sym) != std::string::npos) + s3_pattern.push_back('\\'); + s3_pattern.push_back(sym); + break; From c8b13adaa00d21da5dda89496bce91510cfb4eb6 Mon Sep 17 00:00:00 2001 From: Eric Fahlgren Date: Tue, 9 Apr 2024 07:23:46 -0700 Subject: [PATCH 10/20] snort3: fix bug with unset variable - Parameter not set in two places: /usr/bin/snort-mgr: eval: line 125: options: parameter not set Reported-by: @klingon888 Signed-off-by: Eric Fahlgren --- net/snort3/Makefile | 2 +- net/snort3/files/snort-mgr | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/net/snort3/Makefile b/net/snort3/Makefile index 48ce85b456..a8552428b7 100644 --- a/net/snort3/Makefile +++ b/net/snort3/Makefile @@ -7,7 +7,7 @@ include $(TOPDIR)/rules.mk PKG_NAME:=snort3 PKG_VERSION:=3.1.84.0 -PKG_RELEASE:=1 +PKG_RELEASE:=2 PKG_SOURCE_PROTO:=git PKG_SOURCE_VERSION:=$(PKG_VERSION) diff --git a/net/snort3/files/snort-mgr b/net/snort3/files/snort-mgr index 1fc54ca127..3ea1b3e4d3 100644 --- a/net/snort3/files/snort-mgr +++ b/net/snort3/files/snort-mgr @@ -54,7 +54,7 @@ nft_rm_table() { nft_add_table() { if [ "$(uci -q get snort.snort.method)" = "nfq" ]; then - local options + local options='' $VERBOSE && options='-e' print nftables | nft $options -f $STDIN $VERBOSE && nft list table inet snort @@ -118,7 +118,7 @@ check() { fi if [ "$(uci -q get snort.snort.method)" = "nfq" ]; then - local options + local options='' local test_nft="${CONF_DIR}/test_conf.nft" print nftables > "${test_nft}" || die "Errors during generation of nftables config" $VERBOSE && options='-e' From 8d36908aead7a37416ff4ac74d7c6ff59ded505e Mon Sep 17 00:00:00 2001 From: Ray Wang Date: Thu, 25 Apr 2024 21:36:14 +0800 Subject: [PATCH 11/20] hev-socks5-server: add new package HevSocks5Server is a high-performance socks5 server for Unix. 
More details: https://github.com/heiher/hev-socks5-server Signed-off-by: Ray Wang --- net/hev-socks5-server/Makefile | 49 +++++++++++++++++++ .../files/hev-socks5-server.config | 3 ++ .../files/hev-socks5-server.init | 34 +++++++++++++ net/hev-socks5-server/test.sh | 3 ++ 4 files changed, 89 insertions(+) create mode 100644 net/hev-socks5-server/Makefile create mode 100644 net/hev-socks5-server/files/hev-socks5-server.config create mode 100644 net/hev-socks5-server/files/hev-socks5-server.init create mode 100755 net/hev-socks5-server/test.sh diff --git a/net/hev-socks5-server/Makefile b/net/hev-socks5-server/Makefile new file mode 100644 index 0000000000..c307b0e6ac --- /dev/null +++ b/net/hev-socks5-server/Makefile @@ -0,0 +1,49 @@ +include $(TOPDIR)/rules.mk + +PKG_NAME:=hev-socks5-server +PKG_VERSION:=2.6.5 +PKG_RELEASE:=1 + +PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz +PKG_SOURCE_URL:=https://github.com/heiher/hev-socks5-server/releases/download/$(PKG_VERSION) +PKG_HASH:=07d3297483cc624464eec424f7dd27f2028f4f56c70c2c9d0b6902e181a32ccb + +PKG_MAINTAINER:=Ray Wang +PKG_LICENSE:=GPL-3.0-only +PKG_LICENSE_FILES:=License + +PKG_BUILD_FLAGS:=no-mips16 +PKG_BUILD_PARALLEL:=1 + +include $(INCLUDE_DIR)/package.mk + +define Package/hev-socks5-server + SECTION:=net + CATEGORY:=Network + TITLE:=A high-performance socks5 server for Unix + URL:=https://github.com/heiher/hev-socks5-server +endef + +MAKE_FLAGS += REV_ID="$(PKG_VERSION)" + +define Package/hev-socks5-server/conffiles +/etc/config/hev-socks5-server +/etc/hev-socks5-server/ +endef + +define Package/hev-socks5-server/install + $(INSTALL_DIR) $(1)/usr/bin + $(INSTALL_BIN) $(PKG_BUILD_DIR)/bin/hev-socks5-server $(1)/usr/bin/ + + $(INSTALL_DIR) $(1)/etc/hev-socks5-server/ + $(INSTALL_CONF) $(PKG_BUILD_DIR)/conf/main.yml $(1)/etc/hev-socks5-server/main.yml + $(INSTALL_CONF) $(PKG_BUILD_DIR)/conf/auth.txt $(1)/etc/hev-socks5-server/auth.txt + + $(INSTALL_DIR) $(1)/etc/config/ + $(INSTALL_CONF) ./files/hev-socks5-server.config $(1)/etc/config/hev-socks5-server + + $(INSTALL_DIR) $(1)/etc/init.d/ + $(INSTALL_BIN) ./files/hev-socks5-server.init $(1)/etc/init.d/hev-socks5-server +endef + +$(eval $(call BuildPackage,hev-socks5-server)) diff --git a/net/hev-socks5-server/files/hev-socks5-server.config b/net/hev-socks5-server/files/hev-socks5-server.config new file mode 100644 index 0000000000..0df231ada4 --- /dev/null +++ b/net/hev-socks5-server/files/hev-socks5-server.config @@ -0,0 +1,3 @@ +config hev-socks5-server 'config' + option enabled '0' + option conffile '/etc/hev-socks5-server/main.yml' diff --git a/net/hev-socks5-server/files/hev-socks5-server.init b/net/hev-socks5-server/files/hev-socks5-server.init new file mode 100644 index 0000000000..671870c1d5 --- /dev/null +++ b/net/hev-socks5-server/files/hev-socks5-server.init @@ -0,0 +1,34 @@ +#!/bin/sh /etc/rc.common + +USE_PROCD=1 +START=99 + +CONF="hev-socks5-server" +PROG="/usr/bin/hev-socks5-server" + +start_service() { + config_load "$CONF" + + local enabled + config_get_bool enabled "config" "enabled" "0" + [ "$enabled" -eq "1" ] || return 1 + + local conffile + config_get conffile "config" "conffile" + + procd_open_instance "$CONF" + procd_set_param command "$PROG" "$conffile" + procd_set_param file "$conffile" + + procd_set_param limits core="unlimited" + procd_set_param limits nofile="1000000 1000000" + procd_set_param stdout 1 + procd_set_param stderr 1 + procd_set_param respawn + + procd_close_instance +} + +service_triggers() { + procd_add_reload_trigger "$CONF" +} diff --git 
a/net/hev-socks5-server/test.sh b/net/hev-socks5-server/test.sh new file mode 100755 index 0000000000..9ae615ccdc --- /dev/null +++ b/net/hev-socks5-server/test.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +"$1" 2>&1 | grep "$2" From c3f21bfc78de9ab01aa126fb7a2089fd568fbc82 Mon Sep 17 00:00:00 2001 From: Olivier Poitrey Date: Sun, 28 Apr 2024 00:47:37 +0000 Subject: [PATCH 12/20] nextdns: Update to version 1.43.0 Signed-off-by: Olivier Poitrey --- net/nextdns/Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/net/nextdns/Makefile b/net/nextdns/Makefile index 5efe9799bf..4bd4458772 100644 --- a/net/nextdns/Makefile +++ b/net/nextdns/Makefile @@ -8,13 +8,13 @@ include $(TOPDIR)/rules.mk PKG_NAME:=nextdns -PKG_VERSION:=1.42.0 +PKG_VERSION:=1.43.0 PKG_RELEASE:=1 PKG_SOURCE:=nextdns-$(PKG_VERSION).tar.gz PKG_SOURCE_VERSION:=v$(PKG_VERSION) PKG_SOURCE_URL:=https://codeload.github.com/nextdns/nextdns/tar.gz/v$(PKG_VERSION)? -PKG_HASH:=893692bea9015116d6ce610c94dccf744f3aa29aeb716a15cca17dd6e6b837c1 +PKG_HASH:=b5065135a04cc7fc169c62e203fe1721287cc867b60680e02d3c9bbcc865b85d PKG_MAINTAINER:=Olivier Poitrey PKG_LICENSE:=MIT From da176d58eebcdc62b63560e0d7b8bee2f8196d2e Mon Sep 17 00:00:00 2001 From: Christian Marangi Date: Wed, 25 Oct 2023 05:36:53 +0200 Subject: [PATCH 13/20] lua: add new package with version 5.4 Add new lua version 5.4 required by new version of nmap. Patches are copied from lua 5.3. - Readline patch has to be reworked as lua 5.4 now supports no readline for Linux but still needs some tweaks for macOS and bsd systems. - Patch shared lib required some rework. Signed-off-by: Christian Marangi --- lang/lua5.4/Makefile | 165 ++++++++++++++++++ .../001-include-version-number.patch | 65 +++++++ .../lua5.4/patches-host/100-no_readline.patch | 27 +++ .../patches/001-include-version-number.patch | 65 +++++++ lang/lua5.4/patches/020-shared_liblua.patch | 115 ++++++++++++ lang/lua5.4/patches/100-no_readline.patch | 27 +++ 6 files changed, 464 insertions(+) create mode 100644 lang/lua5.4/Makefile create mode 100644 lang/lua5.4/patches-host/001-include-version-number.patch create mode 100644 lang/lua5.4/patches-host/100-no_readline.patch create mode 100644 lang/lua5.4/patches/001-include-version-number.patch create mode 100644 lang/lua5.4/patches/020-shared_liblua.patch create mode 100644 lang/lua5.4/patches/100-no_readline.patch diff --git a/lang/lua5.4/Makefile b/lang/lua5.4/Makefile new file mode 100644 index 0000000000..b525c9a69f --- /dev/null +++ b/lang/lua5.4/Makefile @@ -0,0 +1,165 @@ +# +# Copyright (C) 2006-2023 OpenWrt.org +# +# This is free software, licensed under the GNU General Public License v2. +# See /LICENSE for more information. +# + +include $(TOPDIR)/rules.mk + +PKG_NAME:=lua +PKG_VERSION:=5.4.6 +PKG_RELEASE:=1 + +PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz +PKG_SOURCE_URL:=https://www.lua.org/ftp/ \ + https://www.tecgraf.puc-rio.br/lua/ftp/ +PKG_HASH:=7d5ea1b9cb6aa0b59ca3dde1c6adcb57ef83a1ba8e5432c0ecd06bf439b3ad88 +PKG_BUILD_PARALLEL:=1 + +PKG_MAINTAINER:=Christian Marangi + +PKG_LICENSE:=MIT +PKG_LICENSE_FILES:=COPYRIGHT +PKG_CPE_ID:=cpe:/a:lua:lua + +HOST_PATCH_DIR := ./patches-host + +include $(INCLUDE_DIR)/package.mk +include $(INCLUDE_DIR)/host-build.mk + +define Package/lua5.4/Default + SUBMENU:=Lua + SECTION:=lang + CATEGORY:=Languages + TITLE:=Lua programming language (version 5.4) + URL:=https://www.lua.org/ +endef + +define Package/lua5.4/Default/description + Lua is a powerful, efficient, lightweight, embeddable scripting language. 
It + supports procedural programming, object-oriented programming, functional + programming, data-driven programming, and data description. +endef + +define Package/liblua5.4 +$(call Package/lua5.4/Default) + SUBMENU:= + SECTION:=libs + CATEGORY:=Libraries + TITLE+= (libraries) + ABI_VERSION:=5.4 +endef + +define Package/liblua5.4/description +$(call Package/lua5.4/Default/description) + This package contains the Lua shared libraries, needed by other programs. +endef + +define Package/lua5.4 +$(call Package/lua5.4/Default) + DEPENDS:=+liblua5.4 + TITLE+= (interpreter) +endef + +define Package/lua5.4/description +$(call Package/lua5.4/Default/description) + This package contains the Lua language interpreter. +endef + +define Package/luac5.4 +$(call Package/lua5.4/Default) + DEPENDS:=+liblua5.4 + TITLE+= (compiler) +endef + +define Package/luac5.4/description +$(call Package/lua5.4/Default/description) + This package contains the Lua language compiler. +endef + +define Build/Prepare + $(call Build/Prepare/Default) + mv $(PKG_BUILD_DIR)/doc/lua.1 $(PKG_BUILD_DIR)/doc/lua5.4.1 + mv $(PKG_BUILD_DIR)/doc/luac.1 $(PKG_BUILD_DIR)/doc/luac5.4.1 +endef + +TARGET_CFLAGS += -DLUA_USE_LINUX $(FPIC) -std=gnu99 + +define Build/Compile + $(MAKE) $(PKG_JOBS) -C $(PKG_BUILD_DIR) \ + CC="$(TARGET_CROSS)gcc" \ + AR="$(TARGET_CROSS)ar rcu" \ + RANLIB="$(TARGET_CROSS)ranlib" \ + INSTALL_ROOT=/usr \ + CFLAGS="$(TARGET_CPPFLAGS) $(TARGET_CFLAGS)" \ + PKG_VERSION=$(PKG_VERSION) \ + linux + rm -rf $(PKG_INSTALL_DIR) + mkdir -p $(PKG_INSTALL_DIR) + $(MAKE) -C $(PKG_BUILD_DIR) \ + INSTALL_TOP="$(PKG_INSTALL_DIR)/usr" \ + install +endef + +define Host/Prepare + $(call Host/Prepare/Default) + mv $(HOST_BUILD_DIR)/doc/lua.1 $(HOST_BUILD_DIR)/doc/lua5.4.1 + mv $(HOST_BUILD_DIR)/doc/luac.1 $(HOST_BUILD_DIR)/doc/luac5.4.1 +endef + +define Host/Configure + $(SED) 's,"/usr/local/","$(STAGING_DIR_HOSTPKG)/",' $(HOST_BUILD_DIR)/src/luaconf.h +endef + +ifeq ($(HOST_OS),Darwin) + LUA_OS:=macosx +else + ifeq ($(HOST_OS),FreeBSD) + LUA_OS:=freebsd + else + LUA_OS:=linux + endif +endif + +define Host/Compile + $(MAKE) -C $(HOST_BUILD_DIR) \ + CC="$(HOSTCC) $(HOST_FPIC) -std=gnu99" \ + $(LUA_OS) +endef + +define Host/Install + $(MAKE) -C $(HOST_BUILD_DIR) \ + INSTALL_TOP="$(STAGING_DIR_HOSTPKG)" \ + install +endef + +define Build/InstallDev + $(INSTALL_DIR) $(1)/usr/include/lua5.4 $(1)/usr/lib + $(CP) $(PKG_INSTALL_DIR)/usr/include/lua5.4/lua{,lib,conf}.h $(1)/usr/include/lua5.4/ + $(CP) $(PKG_INSTALL_DIR)/usr/include/lua5.4/lua.hpp $(1)/usr/include/lua5.4/ + $(CP) $(PKG_INSTALL_DIR)/usr/include/lua5.4/lauxlib.h $(1)/usr/include/lua5.4/ + $(INSTALL_DIR) $(1)/usr/lib + $(CP) $(PKG_INSTALL_DIR)/usr/lib/liblua5.4.{a,so*} $(1)/usr/lib/ + $(LN) liblua5.4.so.0.0.0 $(1)/usr/lib/liblualib5.4.so +endef + +define Package/liblua5.4/install + $(INSTALL_DIR) $(1)/usr/lib + $(CP) $(PKG_INSTALL_DIR)/usr/lib/liblua5.4.so* $(1)/usr/lib/ +endef + +define Package/lua5.4/install + $(INSTALL_DIR) $(1)/usr/bin + $(INSTALL_BIN) $(PKG_INSTALL_DIR)/usr/bin/lua5.4 $(1)/usr/bin/ +endef + +define Package/luac5.4/install + $(INSTALL_DIR) $(1)/usr/bin + $(INSTALL_BIN) $(PKG_INSTALL_DIR)/usr/bin/luac5.4 $(1)/usr/bin/ +endef + +$(eval $(call BuildPackage,liblua5.4)) +$(eval $(call BuildPackage,lua5.4)) +$(eval $(call BuildPackage,luac5.4)) +$(eval $(call HostBuild)) diff --git a/lang/lua5.4/patches-host/001-include-version-number.patch b/lang/lua5.4/patches-host/001-include-version-number.patch new file mode 100644 index 0000000000..99c225eeda --- /dev/null +++ 
b/lang/lua5.4/patches-host/001-include-version-number.patch @@ -0,0 +1,65 @@ +From 96576b44a1b368bd6590eb0778ae45cc9ccede3f Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Rafa=C5=82=20Mi=C5=82ecki?= +Date: Fri, 21 Jun 2019 14:08:38 +0200 +Subject: [PATCH] include version number + +Including it allows multiple lua versions to coexist. + +Signed-off-by: Rafał Miłecki +--- + +--- a/Makefile ++++ b/Makefile +@@ -12,7 +12,7 @@ PLAT= guess + # LUA_ROOT, LUA_LDIR, and LUA_CDIR in luaconf.h. + INSTALL_TOP= /usr/local + INSTALL_BIN= $(INSTALL_TOP)/bin +-INSTALL_INC= $(INSTALL_TOP)/include ++INSTALL_INC= $(INSTALL_TOP)/include/lua$V + INSTALL_LIB= $(INSTALL_TOP)/lib + INSTALL_MAN= $(INSTALL_TOP)/man/man1 + INSTALL_LMOD= $(INSTALL_TOP)/share/lua/$V +@@ -39,10 +39,10 @@ RM= rm -f + PLATS= guess aix bsd c89 freebsd generic ios linux linux-readline macosx mingw posix solaris + + # What to install. +-TO_BIN= lua luac ++TO_BIN= lua$V luac$V + TO_INC= lua.h luaconf.h lualib.h lauxlib.h lua.hpp +-TO_LIB= liblua.a +-TO_MAN= lua.1 luac.1 ++TO_LIB= liblua$V.a ++TO_MAN= lua$V.1 luac$V.1 + + # Lua version and release. + V= 5.4 +@@ -52,7 +52,7 @@ R= $V.6 + all: $(PLAT) + + $(PLATS) help test clean: +- @cd src && $(MAKE) $@ ++ @cd src && $(MAKE) $@ V=$V + + install: dummy + cd src && $(MKDIR) $(INSTALL_BIN) $(INSTALL_INC) $(INSTALL_LIB) $(INSTALL_MAN) $(INSTALL_LMOD) $(INSTALL_CMOD) +--- a/src/Makefile ++++ b/src/Makefile +@@ -32,15 +32,15 @@ CMCFLAGS= + + PLATS= guess aix bsd c89 freebsd generic ios linux linux-readline macosx mingw posix solaris + +-LUA_A= liblua.a ++LUA_A= liblua$V.a + CORE_O= lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o ltm.o lundump.o lvm.o lzio.o + LIB_O= lauxlib.o lbaselib.o lcorolib.o ldblib.o liolib.o lmathlib.o loadlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o linit.o + BASE_O= $(CORE_O) $(LIB_O) $(MYOBJS) + +-LUA_T= lua ++LUA_T= lua$V + LUA_O= lua.o + +-LUAC_T= luac ++LUAC_T= luac$V + LUAC_O= luac.o + + ALL_O= $(BASE_O) $(LUA_O) $(LUAC_O) diff --git a/lang/lua5.4/patches-host/100-no_readline.patch b/lang/lua5.4/patches-host/100-no_readline.patch new file mode 100644 index 0000000000..64567dd0ad --- /dev/null +++ b/lang/lua5.4/patches-host/100-no_readline.patch @@ -0,0 +1,27 @@ +--- a/src/Makefile ++++ b/src/Makefile +@@ -126,7 +126,7 @@ c89: + @echo '' + + FreeBSD NetBSD OpenBSD freebsd: +- $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX -DLUA_USE_READLINE -I/usr/include/edit" SYSLIBS="-Wl,-E -ledit" CC="cc" ++ $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX $(if $(USE_READLINE), -DLUA_USE_READLINE) -I/usr/include/edit" SYSLIBS="-Wl,-E -ledit" CC="cc" + + generic: $(ALL) + +@@ -136,13 +136,13 @@ ios: + Linux linux: linux-noreadline + + linux-noreadline: +- $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX" SYSLIBS="-Wl,-E -ldl" ++ $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX $(if $(USE_READLINE), -DLUA_USE_READLINE)" SYSLIBS="-Wl,-E -ldl $(if $(USE_READLINE), -lreadline)" + + linux-readline: + $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX -DLUA_USE_READLINE" SYSLIBS="-Wl,-E -ldl -lreadline" + + Darwin macos macosx: +- $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_MACOSX -DLUA_USE_READLINE" SYSLIBS="-lreadline" ++ $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_MACOSX $(if $(USE_READLINE), -DLUA_USE_READLINE)" SYSLIBS="$(if $(USE_READLINE), -lreadline)" + + mingw: + $(MAKE) "LUA_A=lua54.dll" "LUA_T=lua.exe" \ diff --git a/lang/lua5.4/patches/001-include-version-number.patch b/lang/lua5.4/patches/001-include-version-number.patch new file mode 100644 
index 0000000000..99c225eeda --- /dev/null +++ b/lang/lua5.4/patches/001-include-version-number.patch @@ -0,0 +1,65 @@ +From 96576b44a1b368bd6590eb0778ae45cc9ccede3f Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Rafa=C5=82=20Mi=C5=82ecki?= +Date: Fri, 21 Jun 2019 14:08:38 +0200 +Subject: [PATCH] include version number + +Including it allows multiple lua versions to coexist. + +Signed-off-by: Rafał Miłecki +--- + +--- a/Makefile ++++ b/Makefile +@@ -12,7 +12,7 @@ PLAT= guess + # LUA_ROOT, LUA_LDIR, and LUA_CDIR in luaconf.h. + INSTALL_TOP= /usr/local + INSTALL_BIN= $(INSTALL_TOP)/bin +-INSTALL_INC= $(INSTALL_TOP)/include ++INSTALL_INC= $(INSTALL_TOP)/include/lua$V + INSTALL_LIB= $(INSTALL_TOP)/lib + INSTALL_MAN= $(INSTALL_TOP)/man/man1 + INSTALL_LMOD= $(INSTALL_TOP)/share/lua/$V +@@ -39,10 +39,10 @@ RM= rm -f + PLATS= guess aix bsd c89 freebsd generic ios linux linux-readline macosx mingw posix solaris + + # What to install. +-TO_BIN= lua luac ++TO_BIN= lua$V luac$V + TO_INC= lua.h luaconf.h lualib.h lauxlib.h lua.hpp +-TO_LIB= liblua.a +-TO_MAN= lua.1 luac.1 ++TO_LIB= liblua$V.a ++TO_MAN= lua$V.1 luac$V.1 + + # Lua version and release. + V= 5.4 +@@ -52,7 +52,7 @@ R= $V.6 + all: $(PLAT) + + $(PLATS) help test clean: +- @cd src && $(MAKE) $@ ++ @cd src && $(MAKE) $@ V=$V + + install: dummy + cd src && $(MKDIR) $(INSTALL_BIN) $(INSTALL_INC) $(INSTALL_LIB) $(INSTALL_MAN) $(INSTALL_LMOD) $(INSTALL_CMOD) +--- a/src/Makefile ++++ b/src/Makefile +@@ -32,15 +32,15 @@ CMCFLAGS= + + PLATS= guess aix bsd c89 freebsd generic ios linux linux-readline macosx mingw posix solaris + +-LUA_A= liblua.a ++LUA_A= liblua$V.a + CORE_O= lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o ltm.o lundump.o lvm.o lzio.o + LIB_O= lauxlib.o lbaselib.o lcorolib.o ldblib.o liolib.o lmathlib.o loadlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o linit.o + BASE_O= $(CORE_O) $(LIB_O) $(MYOBJS) + +-LUA_T= lua ++LUA_T= lua$V + LUA_O= lua.o + +-LUAC_T= luac ++LUAC_T= luac$V + LUAC_O= luac.o + + ALL_O= $(BASE_O) $(LUA_O) $(LUAC_O) diff --git a/lang/lua5.4/patches/020-shared_liblua.patch b/lang/lua5.4/patches/020-shared_liblua.patch new file mode 100644 index 0000000000..f2bed51588 --- /dev/null +++ b/lang/lua5.4/patches/020-shared_liblua.patch @@ -0,0 +1,115 @@ +--- a/Makefile ++++ b/Makefile +@@ -41,7 +41,7 @@ PLATS= guess aix bsd c89 freebsd generic + # What to install. + TO_BIN= lua$V luac$V + TO_INC= lua.h luaconf.h lualib.h lauxlib.h lua.hpp +-TO_LIB= liblua$V.a ++TO_LIB= liblua$V.a liblua$V.so.0.0.0 + TO_MAN= lua$V.1 luac$V.1 + + # Lua version and release. 
+@@ -59,6 +59,9 @@ install: dummy + cd src && $(INSTALL_EXEC) $(TO_BIN) $(INSTALL_BIN) + cd src && $(INSTALL_DATA) $(TO_INC) $(INSTALL_INC) + cd src && $(INSTALL_DATA) $(TO_LIB) $(INSTALL_LIB) ++ ln -s liblua$V.so.0.0.0 $(INSTALL_LIB)/liblua$V.so.0.0 ++ ln -s liblua$V.so.0.0.0 $(INSTALL_LIB)/liblua$V.so.0 ++ ln -s liblua$V.so.0.0.0 $(INSTALL_LIB)/liblua$V.so + cd doc && $(INSTALL_DATA) $(TO_MAN) $(INSTALL_MAN) + + uninstall: +--- a/src/ldebug.h ++++ b/src/ldebug.h +@@ -36,7 +36,7 @@ + #endif + + +-LUAI_FUNC int luaG_getfuncline (const Proto *f, int pc); ++LUA_API int luaG_getfuncline (const Proto *f, int pc); + LUAI_FUNC const char *luaG_findlocal (lua_State *L, CallInfo *ci, int n, + StkId *pos); + LUAI_FUNC l_noret luaG_typeerror (lua_State *L, const TValue *o, +--- a/src/lstring.h ++++ b/src/lstring.h +@@ -50,7 +50,7 @@ LUAI_FUNC void luaS_init (lua_State *L); + LUAI_FUNC void luaS_remove (lua_State *L, TString *ts); + LUAI_FUNC Udata *luaS_newudata (lua_State *L, size_t s, int nuvalue); + LUAI_FUNC TString *luaS_newlstr (lua_State *L, const char *str, size_t l); +-LUAI_FUNC TString *luaS_new (lua_State *L, const char *str); ++LUA_API TString *luaS_new (lua_State *L, const char *str); + LUAI_FUNC TString *luaS_createlngstrobj (lua_State *L, size_t l); + + +--- a/src/lundump.h ++++ b/src/lundump.h +@@ -30,7 +30,7 @@ + LUAI_FUNC LClosure* luaU_undump (lua_State* L, ZIO* Z, const char* name); + + /* dump one chunk; from ldump.c */ +-LUAI_FUNC int luaU_dump (lua_State* L, const Proto* f, lua_Writer w, ++LUA_API int luaU_dump (lua_State* L, const Proto* f, lua_Writer w, + void* data, int strip); + + #endif +--- a/src/lzio.h ++++ b/src/lzio.h +@@ -44,7 +44,7 @@ typedef struct Mbuffer { + #define luaZ_freebuffer(L, buff) luaZ_resizebuffer(L, buff, 0) + + +-LUAI_FUNC void luaZ_init (lua_State *L, ZIO *z, lua_Reader reader, ++LUA_API void luaZ_init (lua_State *L, ZIO *z, lua_Reader reader, + void *data); + LUAI_FUNC size_t luaZ_read (ZIO* z, void *b, size_t n); /* read next n bytes */ + +--- a/src/Makefile ++++ b/src/Makefile +@@ -33,6 +33,7 @@ CMCFLAGS= + PLATS= guess aix bsd c89 freebsd generic ios linux linux-readline macosx mingw posix solaris + + LUA_A= liblua$V.a ++LUA_SO= liblua$V.so.0.0.0 + CORE_O= lapi.o lcode.o lctype.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o ltm.o lundump.o lvm.o lzio.o + LIB_O= lauxlib.o lbaselib.o lcorolib.o ldblib.o liolib.o lmathlib.o loadlib.o loslib.o lstrlib.o ltablib.o lutf8lib.o linit.o + BASE_O= $(CORE_O) $(LIB_O) $(MYOBJS) +@@ -44,8 +45,9 @@ LUAC_T= luac$V + LUAC_O= luac.o + + ALL_O= $(BASE_O) $(LUA_O) $(LUAC_O) +-ALL_T= $(LUA_A) $(LUA_T) $(LUAC_T) ++ALL_T= $(LUA_A) $(LUA_SO) $(LUA_T) $(LUAC_T) + ALL_A= $(LUA_A) ++ALL_SO= $(LUA_SO) + + # Targets start here. + default: $(PLAT) +@@ -56,14 +58,25 @@ o: $(ALL_O) + + a: $(ALL_A) + ++so: $(ALL_SO) ++ + $(LUA_A): $(BASE_O) + $(AR) $@ $(BASE_O) + $(RANLIB) $@ + +-$(LUA_T): $(LUA_O) $(LUA_A) +- $(CC) -o $@ $(LDFLAGS) $(LUA_O) $(LUA_A) $(LIBS) ++$(LUA_SO): $(CORE_O) $(LIB_O) ++ $(CC) -o $@ -Wl,-Bsymbolic-functions -shared -Wl,-soname="$@" $? ++ ln -fs $@ liblua$V.so.0.0 ++ ln -fs $@ liblua$V.so.0 ++ ln -fs $@ liblua$V.so ++ ++$(LUA_T): $(LUA_O) $(LUA_SO) ++ $(CC) -o $@ -L. -llua$V $(MYLDFLAGS) $(LUA_O) $(LIBS) ++ ++$(LUAC_T): $(LUAC_O) $(LUA_SO) ++ $(CC) -o $@ -L. 
-llua$V $(MYLDFLAGS) $(LUAC_O) $(LIBS) + +-$(LUAC_T): $(LUAC_O) $(LUA_A) ++$(LUAC_T)-host: $(LUAC_O) $(LUA_A) + $(CC) -o $@ $(LDFLAGS) $(LUAC_O) $(LUA_A) $(LIBS) + + test: diff --git a/lang/lua5.4/patches/100-no_readline.patch b/lang/lua5.4/patches/100-no_readline.patch new file mode 100644 index 0000000000..64567dd0ad --- /dev/null +++ b/lang/lua5.4/patches/100-no_readline.patch @@ -0,0 +1,27 @@ +--- a/src/Makefile ++++ b/src/Makefile +@@ -126,7 +126,7 @@ c89: + @echo '' + + FreeBSD NetBSD OpenBSD freebsd: +- $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX -DLUA_USE_READLINE -I/usr/include/edit" SYSLIBS="-Wl,-E -ledit" CC="cc" ++ $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX $(if $(USE_READLINE), -DLUA_USE_READLINE) -I/usr/include/edit" SYSLIBS="-Wl,-E -ledit" CC="cc" + + generic: $(ALL) + +@@ -136,13 +136,13 @@ ios: + Linux linux: linux-noreadline + + linux-noreadline: +- $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX" SYSLIBS="-Wl,-E -ldl" ++ $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX $(if $(USE_READLINE), -DLUA_USE_READLINE)" SYSLIBS="-Wl,-E -ldl $(if $(USE_READLINE), -lreadline)" + + linux-readline: + $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_LINUX -DLUA_USE_READLINE" SYSLIBS="-Wl,-E -ldl -lreadline" + + Darwin macos macosx: +- $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_MACOSX -DLUA_USE_READLINE" SYSLIBS="-lreadline" ++ $(MAKE) $(ALL) SYSCFLAGS="-DLUA_USE_MACOSX $(if $(USE_READLINE), -DLUA_USE_READLINE)" SYSLIBS="$(if $(USE_READLINE), -lreadline)" + + mingw: + $(MAKE) "LUA_A=lua54.dll" "LUA_T=lua.exe" \ From 7521cec07ca08ee5a5342b5bcdef805e6e10532e Mon Sep 17 00:00:00 2001 From: Olivier Poitrey Date: Sun, 28 Apr 2024 13:06:30 +0000 Subject: [PATCH 14/20] nextdns: Update to version 1.43.1 Signed-off-by: Olivier Poitrey --- net/nextdns/Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/net/nextdns/Makefile b/net/nextdns/Makefile index 4bd4458772..362aa8f39f 100644 --- a/net/nextdns/Makefile +++ b/net/nextdns/Makefile @@ -8,13 +8,13 @@ include $(TOPDIR)/rules.mk PKG_NAME:=nextdns -PKG_VERSION:=1.43.0 +PKG_VERSION:=1.43.1 PKG_RELEASE:=1 PKG_SOURCE:=nextdns-$(PKG_VERSION).tar.gz PKG_SOURCE_VERSION:=v$(PKG_VERSION) PKG_SOURCE_URL:=https://codeload.github.com/nextdns/nextdns/tar.gz/v$(PKG_VERSION)? -PKG_HASH:=b5065135a04cc7fc169c62e203fe1721287cc867b60680e02d3c9bbcc865b85d +PKG_HASH:=39d6073dc89b2bb91d03c2e9a4b5d6717ed8d6435232c02b18301ea9dfc5a6ad PKG_MAINTAINER:=Olivier Poitrey PKG_LICENSE:=MIT From cbe30884e6c171312e1f05bd0c72da20f1ff233c Mon Sep 17 00:00:00 2001 From: Christian Marangi Date: Wed, 25 Oct 2023 05:41:55 +0200 Subject: [PATCH 15/20] nmap: bump to version 7.94 Bump to version 7.94. Nmap now require lua 5.4. Patch 020-Python3-port-of-ndiff.patch has been merged upstream and can be dropped. Patch 001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch is now required to fix a problem with header inclusion for lua 5.4. 
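
For reference, a minimal, illustrative sketch (not taken from nmap's sources; only the
macro names come from the patch below, the include paths are assumed distro-style
locations) of how configure-generated HAVE_ macros like these are typically consumed,
which is why the 5.3 names had to become 5.4 names:

  /* lua54_check.c -- illustrative only: macro names mirror the patch below,
   * header locations are assumptions about common Lua 5.4 install layouts. */
  #include <stdio.h>

  #if defined(HAVE_LUA5_4_LUA_H)
  #  include <lua5.4/lua.h>          /* versioned include directory */
  #elif defined(HAVE_LUA_5_4_LUA_H)
  #  include <lua/5.4/lua.h>
  #elif defined(HAVE_LUA_LUA_H)
  #  include <lua/lua.h>
  #else
  #  include <lua.h>                 /* flat layout, as staged by liblua5.4 */
  #endif

  int main(void)
  {
      printf("compiled against %s\n", LUA_RELEASE);
      return 0;
  }

With the old HAVE_LUA5_3_LUA_H / HAVE_LUA_5_3_LUA_H names, none of the versioned
branches in code like this can ever be taken on a Lua 5.4 system, so such a build
falls back to a bare <lua.h> that may not exist on a 5.4-only include path.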
Signed-off-by: Christian Marangi --- net/nmap/Makefile | 10 +- ...HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch | 35 + .../patches/020-Python3-port-of-ndiff.patch | 1734 ----------------- 3 files changed, 40 insertions(+), 1739 deletions(-) create mode 100644 net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch delete mode 100644 net/nmap/patches/020-Python3-port-of-ndiff.patch diff --git a/net/nmap/Makefile b/net/nmap/Makefile index 501ea06728..d13dad50d9 100644 --- a/net/nmap/Makefile +++ b/net/nmap/Makefile @@ -13,13 +13,13 @@ include $(TOPDIR)/rules.mk PKG_NAME:=nmap -PKG_VERSION:=7.93 -PKG_RELEASE:=4 +PKG_VERSION:=7.94 +PKG_RELEASE:=1 PKG_MAINTAINER:=Nuno Gonçalves PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 PKG_SOURCE_URL:=https://nmap.org/dist/ -PKG_HASH:=55bcfe4793e25acc96ba4274d8c4228db550b8e8efd72004b38ec55a2dd16651 +PKG_HASH:=d71be189eec43d7e099bac8571509d316c4577ca79491832ac3e1217bc8f92cc PKG_LICENSE:=NPSL-0.94-or-NPSL-0.95 PKG_LICENSE_FILES:=LICENSE PKG_CPE_ID:=cpe:/a:nmap:nmap @@ -60,7 +60,7 @@ endef define Package/nmap-full $(call Package/nmap/default) - DEPENDS:=$(NMAP_DEPENDS) $(SSL_DEPENDS) +liblua5.3 +libssh2 + DEPENDS:=$(NMAP_DEPENDS) +libopenssl +liblua5.4 +libssh2 VARIANT:=full TITLE:=Nmap (with OpenSSL and scripting support) endef @@ -81,7 +81,7 @@ endef define Package/ncat-full $(call Package/nmap/default) - DEPENDS:=$(NCAT_DEPENDS) $(SSL_DEPENDS) +liblua5.3 + DEPENDS:=$(NCAT_DEPENDS) +libopenssl +liblua5.4 VARIANT:=full TITLE:=Ncat (with OpenSSL and scripting support) endef diff --git a/net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch b/net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch new file mode 100644 index 0000000000..fbac9bf024 --- /dev/null +++ b/net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch @@ -0,0 +1,35 @@ +From b9263f056ab3acd666d25af84d399410560d48ac Mon Sep 17 00:00:00 2001 +From: dmiller +Date: Tue, 30 May 2023 18:33:07 +0000 +Subject: [PATCH] Use correct HAVE_ macros for Lua 5.4. Fixes #2648 + +--- + ncat/config.h.in | 4 ++-- + nmap_config.h.in | 4 ++-- + 2 files changed, 4 insertions(+), 4 deletions(-) + +--- a/ncat/config.h.in ++++ b/ncat/config.h.in +@@ -191,7 +191,7 @@ + + /* Defines for locating Lua */ + #undef LUA_INCLUDED +-#undef HAVE_LUA5_3_LUA_H +-#undef HAVE_LUA_5_3_LUA_H ++#undef HAVE_LUA5_4_LUA_H ++#undef HAVE_LUA_5_4_LUA_H + #undef HAVE_LUA_H + #undef HAVE_LUA_LUA_H +--- a/nmap_config.h.in ++++ b/nmap_config.h.in +@@ -172,8 +172,8 @@ extern "C" int gethostname (char *, unsi + #undef HAVE_PCAP_SET_IMMEDIATE_MODE + + /* Various possibilities for lua.h */ +-#undef HAVE_LUA5_3_LUA_H +-#undef HAVE_LUA_5_3_LUA_H ++#undef HAVE_LUA5_4_LUA_H ++#undef HAVE_LUA_5_4_LUA_H + #undef HAVE_LUA_H + #undef HAVE_LUA_LUA_H + diff --git a/net/nmap/patches/020-Python3-port-of-ndiff.patch b/net/nmap/patches/020-Python3-port-of-ndiff.patch deleted file mode 100644 index 019be8ac8f..0000000000 --- a/net/nmap/patches/020-Python3-port-of-ndiff.patch +++ /dev/null @@ -1,1734 +0,0 @@ -From: Bryan Quigley -Date: Sat, 2 Nov 2019 21:06:44 -0700 -Subject: Python3 port of ndiff - -Ported all python scrips in ndiff/ except setup.py - -Some hints on cmp taken from #1484 - -Minor tweaks to Makefile to support python3, but unsure if -there is a better way to do that. - -Seperated .travis.yml commands for easier debugging where it breaks. 
- -This closes the easy half of #1176 - -Resolves: #1484 ---- - .travis.yml | 8 +- - Makefile.in | 6 +- - ndiff/ndiff.py | 503 +++++++++++++++++++++--------------------- - ndiff/ndifftest.py | 94 ++++---- - ndiff/scripts/ndiff | 14 +- - ndiff/setup.py | 44 ++-- - ndiff/test-scans/anonymize.py | 18 +- - 7 files changed, 346 insertions(+), 341 deletions(-) - mode change 100644 => 100755 ndiff/setup.py - ---- a/.travis.yml -+++ b/.travis.yml -@@ -4,7 +4,13 @@ compiler: - - clang - # Change this to your needs - sudo: false --script: mkdir /tmp/n && ./configure $SSL_FLAG $LUA_FLAG --prefix=/tmp/n && make && make check && make install && /tmp/n/bin/nmap -A localhost -+script: -+ - "mkdir /tmp/n" -+ - "./configure $SSL_FLAG $LUA_FLAG --prefix=/tmp/n" -+ - "make" -+ - "make check" -+ - "make install" -+ - "/tmp/n/bin/nmap -A localhost" - - env: - - SSL_FLAG="--without-openssl" LUA_FLAG="--without-liblua" ---- a/Makefile.in -+++ b/Makefile.in -@@ -34,6 +34,7 @@ ZENMAPDIR = @ZENMAPDIR@ - NDIFFDIR = @NDIFFDIR@ - NPINGDIR = @NPINGDIR@ - PYTHON = @PYTHON@ -+PYTHON3 = /usr/bin/env python3 - DEFS = @DEFS@ -DNMAP_PLATFORM=\"$(NMAP_PLATFORM)\" -DNMAPDATADIR=\"$(nmapdatadir)\" - # With GCC, add extra security checks to source code. - # http://gcc.gnu.org/ml/gcc-patches/2004-09/msg02055.html -@@ -361,6 +362,7 @@ tests/check_dns: $(OBJS) - # this as the location of the interpreter whenever we're not doing a - # local installation. - DEFAULT_PYTHON_PATH = /usr/bin/env python -+DEFAULT_PYTHON3_PATH = /usr/bin/env python3 - - build-zenmap: $(ZENMAPDIR)/setup.py $(ZENMAPDIR)/zenmapCore/Version.py - # When DESTDIR is defined, assume we're building an executable -@@ -381,7 +383,7 @@ install-zenmap: $(ZENMAPDIR)/setup.py - ln -sf zenmap $(DESTDIR)$(bindir)/xnmap - - build-ndiff: -- cd $(NDIFFDIR) && $(PYTHON) setup.py build $(if $(DESTDIR),--executable "$(DEFAULT_PYTHON_PATH)") -+ cd $(NDIFFDIR) && $(PYTHON) setup.py build $(if $(DESTDIR),--executable "$(DEFAULT_PYTHON3_PATH)") - - build-nping: $(NPINGDIR)/Makefile build-nbase build-nsock build-netutil $(NPINGDIR)/nping.h @DNET_BUILD@ @PCAP_BUILD@ - @cd $(NPINGDIR) && $(MAKE) -@@ -451,7 +453,7 @@ check-ncat: - @cd $(NCATDIR) && $(MAKE) check - - check-ndiff: -- @cd $(NDIFFDIR) && $(PYTHON) ndifftest.py -+ @cd $(NDIFFDIR) && $(PYTHON3) ndifftest.py - - check-nsock: - @cd $(NSOCKDIR)/src && $(MAKE) check ---- a/ndiff/ndiff.py -+++ b/ndiff/ndiff.py -@@ -1,4 +1,4 @@ --#!/usr/bin/env python -+#!/usr/bin/env python3 - - # Ndiff - # -@@ -25,11 +25,11 @@ xml.__path__ = [x for x in xml.__path__ - import xml.sax - import xml.sax.saxutils - import xml.dom.minidom --from StringIO import StringIO -+from io import StringIO - - verbose = False - --NDIFF_XML_VERSION = u"1" -+NDIFF_XML_VERSION = "1" - - - class OverrideEntityResolver(xml.sax.handler.EntityResolver): -@@ -74,35 +74,35 @@ class Scan(object): - def write_nmaprun_open(self, writer): - attrs = {} - if self.scanner is not None: -- attrs[u"scanner"] = self.scanner -+ attrs["scanner"] = self.scanner - if self.args is not None: -- attrs[u"args"] = self.args -+ attrs["args"] = self.args - if self.start_date is not None: -- attrs[u"start"] = "%d" % time.mktime(self.start_date.timetuple()) -- attrs[u"startstr"] = self.start_date.strftime( -+ attrs["start"] = "%d" % time.mktime(self.start_date.timetuple()) -+ attrs["startstr"] = self.start_date.strftime( - "%a %b %d %H:%M:%S %Y") - if self.version is not None: -- attrs[u"version"] = self.version -- writer.startElement(u"nmaprun", attrs) -+ attrs["version"] = self.version -+ 
writer.startElement("nmaprun", attrs) - - def write_nmaprun_close(self, writer): -- writer.endElement(u"nmaprun") -+ writer.endElement("nmaprun") - - def nmaprun_to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- elem = document.createElement(u"nmaprun") -+ elem = document.createElement("nmaprun") - if self.scanner is not None: -- elem.setAttribute(u"scanner", self.scanner) -+ elem.setAttribute("scanner", self.scanner) - if self.args is not None: -- elem.setAttribute(u"args", self.args) -+ elem.setAttribute("args", self.args) - if self.start_date is not None: - elem.setAttribute( -- u"start", "%d" % time.mktime(self.start_date.timetuple())) -+ "start", "%d" % time.mktime(self.start_date.timetuple())) - elem.setAttribute( -- u"startstr", -+ "startstr", - self.start_date.strftime("%a %b %d %H:%M:%S %Y")) - if self.version is not None: -- elem.setAttribute(u"version", self.version) -+ elem.setAttribute("version", self.version) - frag.appendChild(elem) - return frag - -@@ -132,17 +132,17 @@ class Host(object): - - def format_name(self): - """Return a human-readable identifier for this host.""" -- address_s = u", ".join(a.s for a in sorted(self.addresses)) -- hostname_s = u", ".join(sorted(self.hostnames)) -+ address_s = ", ".join(a.s for a in sorted(self.addresses)) -+ hostname_s = ", ".join(sorted(self.hostnames)) - if len(hostname_s) > 0: - if len(address_s) > 0: -- return u"%s (%s)" % (hostname_s, address_s) -+ return "%s (%s)" % (hostname_s, address_s) - else: - return hostname_s - elif len(address_s) > 0: - return address_s - else: -- return u"" -+ return "" - - def add_port(self, port): - self.ports[port.spec] = port -@@ -159,46 +159,46 @@ class Host(object): - return state is None or state in self.extraports - - def extraports_string(self): -- list = [(count, state) for (state, count) in self.extraports.items()] -+ locallist = [(count, state) for (state, count) in list(self.extraports.items())] - # Reverse-sort by count. 
-- list.sort(reverse=True) -- return u", ".join( -- [u"%d %s ports" % (count, state) for (count, state) in list]) -+ locallist.sort(reverse=True) -+ return ", ".join( -+ ["%d %s ports" % (count, state) for (count, state) in locallist]) - - def state_to_dom_fragment(self, document): - frag = document.createDocumentFragment() - if self.state is not None: -- elem = document.createElement(u"status") -- elem.setAttribute(u"state", self.state) -+ elem = document.createElement("status") -+ elem.setAttribute("state", self.state) - frag.appendChild(elem) - return frag - - def hostname_to_dom_fragment(self, document, hostname): - frag = document.createDocumentFragment() -- elem = document.createElement(u"hostname") -- elem.setAttribute(u"name", hostname) -+ elem = document.createElement("hostname") -+ elem.setAttribute("name", hostname) - frag.appendChild(elem) - return frag - - def extraports_to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- for state, count in self.extraports.items(): -- elem = document.createElement(u"extraports") -- elem.setAttribute(u"state", state) -- elem.setAttribute(u"count", unicode(count)) -+ for state, count in list(self.extraports.items()): -+ elem = document.createElement("extraports") -+ elem.setAttribute("state", state) -+ elem.setAttribute("count", str(count)) - frag.appendChild(elem) - return frag - - def os_to_dom_fragment(self, document, os): - frag = document.createDocumentFragment() -- elem = document.createElement(u"osmatch") -- elem.setAttribute(u"name", os) -+ elem = document.createElement("osmatch") -+ elem.setAttribute("name", os) - frag.appendChild(elem) - return frag - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- elem = document.createElement(u"host") -+ elem = document.createElement("host") - - if self.state is not None: - elem.appendChild(self.state_to_dom_fragment(document)) -@@ -207,13 +207,13 @@ class Host(object): - elem.appendChild(addr.to_dom_fragment(document)) - - if len(self.hostnames) > 0: -- hostnames_elem = document.createElement(u"hostnames") -+ hostnames_elem = document.createElement("hostnames") - for hostname in self.hostnames: - hostnames_elem.appendChild( - self.hostname_to_dom_fragment(document, hostname)) - elem.appendChild(hostnames_elem) - -- ports_elem = document.createElement(u"ports") -+ ports_elem = document.createElement("ports") - ports_elem.appendChild(self.extraports_to_dom_fragment(document)) - for port in sorted(self.ports.values()): - if not self.is_extraports(port.state): -@@ -222,13 +222,13 @@ class Host(object): - elem.appendChild(ports_elem) - - if len(self.os) > 0: -- os_elem = document.createElement(u"os") -+ os_elem = document.createElement("os") - for os in self.os: - os_elem.appendChild(self.os_to_dom_fragment(document, os)) - elem.appendChild(os_elem) - - if len(self.script_results) > 0: -- hostscript_elem = document.createElement(u"hostscript") -+ hostscript_elem = document.createElement("hostscript") - for sr in self.script_results: - hostscript_elem.appendChild(sr.to_dom_fragment(document)) - elem.appendChild(hostscript_elem) -@@ -242,7 +242,7 @@ class Address(object): - self.s = s - - def __eq__(self, other): -- return self.__cmp__(other) == 0 -+ return self.sort_key() == other.sort_key() - - def __ne__(self, other): - return not self.__eq__(other) -@@ -250,8 +250,8 @@ class Address(object): - def __hash__(self): - return hash(self.sort_key()) - -- def __cmp__(self, other): -- return cmp(self.sort_key(), other.sort_key()) -+ def 
__lt__(self, other): -+ return self.sort_key() < other.sort_key() - - def __str__(self): - return str(self.s) -@@ -260,21 +260,21 @@ class Address(object): - return self.s - - def new(type, s): -- if type == u"ipv4": -+ if type == "ipv4": - return IPv4Address(s) -- elif type == u"ipv6": -+ elif type == "ipv6": - return IPv6Address(s) -- elif type == u"mac": -+ elif type == "mac": - return MACAddress(s) - else: -- raise ValueError(u"Unknown address type %s." % type) -+ raise ValueError("Unknown address type %s." % type) - new = staticmethod(new) - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- elem = document.createElement(u"address") -- elem.setAttribute(u"addr", self.s) -- elem.setAttribute(u"addrtype", self.type) -+ elem = document.createElement("address") -+ elem.setAttribute("addr", self.s) -+ elem.setAttribute("addrtype", self.type) - frag.appendChild(elem) - return frag - -@@ -283,21 +283,21 @@ class Address(object): - - - class IPv4Address(Address): -- type = property(lambda self: u"ipv4") -+ type = property(lambda self: "ipv4") - - def sort_key(self): - return (0, self.s) - - - class IPv6Address(Address): -- type = property(lambda self: u"ipv6") -+ type = property(lambda self: "ipv6") - - def sort_key(self): - return (1, self.s) - - - class MACAddress(Address): -- type = property(lambda self: u"mac") -+ type = property(lambda self: "mac") - - def sort_key(self): - return (2, self.s) -@@ -316,31 +316,28 @@ class Port(object): - - def state_string(self): - if self.state is None: -- return u"unknown" -+ return "unknown" - else: -- return unicode(self.state) -+ return str(self.state) - - def spec_string(self): -- return u"%d/%s" % self.spec -+ return "%d/%s" % self.spec - - def __hash__(self): - return hash(self.spec) - -- def __cmp__(self, other): -- d = cmp(self.spec, other.spec) -- if d != 0: -- return d -- return cmp((self.spec, self.service, self.script_results), -- (other.spec, other.service, other.script_results)) -+ def __lt__(self, other): -+ return (self.spec, self.service, self.script_results) < ( -+ other.spec, other.service, other.script_results) - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- elem = document.createElement(u"port") -- elem.setAttribute(u"portid", unicode(self.spec[0])) -- elem.setAttribute(u"protocol", self.spec[1]) -+ elem = document.createElement("port") -+ elem.setAttribute("portid", str(self.spec[0])) -+ elem.setAttribute("protocol", self.spec[1]) - if self.state is not None: -- state_elem = document.createElement(u"state") -- state_elem.setAttribute(u"state", self.state) -+ state_elem = document.createElement("state") -+ state_elem.setAttribute("state", self.state) - elem.appendChild(state_elem) - elem.appendChild(self.service.to_dom_fragment(document)) - for sr in self.script_results: -@@ -384,7 +381,7 @@ class Service(object): - if len(parts) == 0: - return None - else: -- return u"/".join(parts) -+ return "/".join(parts) - - def version_string(self): - """Get a string like in the VERSION column of Nmap output.""" -@@ -394,17 +391,17 @@ class Service(object): - if self.version is not None: - parts.append(self.version) - if self.extrainfo is not None: -- parts.append(u"(%s)" % self.extrainfo) -+ parts.append("(%s)" % self.extrainfo) - - if len(parts) == 0: - return None - else: -- return u" ".join(parts) -+ return " ".join(parts) - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- elem = document.createElement(u"service") -- for attr in 
(u"name", u"product", u"version", u"extrainfo", u"tunnel"): -+ elem = document.createElement("service") -+ for attr in ("name", "product", "version", "extrainfo", "tunnel"): - v = getattr(self, attr) - if v is None: - continue -@@ -434,53 +431,53 @@ class ScriptResult(object): - result = [] - lines = self.output.splitlines() - if len(lines) > 0: -- lines[0] = self.id + u": " + lines[0] -+ lines[0] = self.id + ": " + lines[0] - for line in lines[:-1]: -- result.append(u"| " + line) -+ result.append("| " + line) - if len(lines) > 0: -- result.append(u"|_ " + lines[-1]) -+ result.append("|_ " + lines[-1]) - return result - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- elem = document.createElement(u"script") -- elem.setAttribute(u"id", self.id) -- elem.setAttribute(u"output", self.output) -+ elem = document.createElement("script") -+ elem.setAttribute("id", self.id) -+ elem.setAttribute("output", self.output) - frag.appendChild(elem) - return frag - - - def format_banner(scan): - """Format a startup banner more or less like Nmap does.""" -- scanner = u"Nmap" -- if scan.scanner is not None and scan.scanner != u"nmap": -+ scanner = "Nmap" -+ if scan.scanner is not None and scan.scanner != "nmap": - scanner = scan.scanner - parts = [scanner] - if scan.version is not None: - parts.append(scan.version) -- parts.append(u"scan") -+ parts.append("scan") - if scan.start_date is not None: -- parts.append(u"initiated %s" % scan.start_date.strftime( -+ parts.append("initiated %s" % scan.start_date.strftime( - "%a %b %d %H:%M:%S %Y")) - if scan.args is not None: -- parts.append(u"as: %s" % scan.args) -- return u" ".join(parts) -+ parts.append("as: %s" % scan.args) -+ return " ".join(parts) - - - def print_script_result_diffs_text(title, script_results_a, script_results_b, - script_result_diffs, f=sys.stdout): -- table = Table(u"*") -+ table = Table("*") - for sr_diff in script_result_diffs: - sr_diff.append_to_port_table(table) - if len(table) > 0: -- print >> f -+ print(file=f) - if len(script_results_b) == 0: -- print >> f, u"-%s:" % title -+ print("-%s:" % title, file=f) - elif len(script_results_a) == 0: -- print >> f, u"+%s:" % title -+ print("+%s:" % title, file=f) - else: -- print >> f, u" %s:" % title -- print >> f, table -+ print(" %s:" % title, file=f) -+ print(table, file=f) - - - def script_result_diffs_to_dom_fragment(elem, script_results_a, -@@ -488,13 +485,13 @@ def script_result_diffs_to_dom_fragment( - if len(script_results_a) == 0 and len(script_results_b) == 0: - return document.createDocumentFragment() - elif len(script_results_b) == 0: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - for sr in script_results_a: - elem.appendChild(sr.to_dom_fragment(document)) - a_elem.appendChild(elem) - return a_elem - elif len(script_results_a) == 0: -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - for sr in script_results_b: - elem.appendChild(sr.to_dom_fragment(document)) - b_elem.appendChild(elem) -@@ -579,10 +576,10 @@ class ScanDiffText(ScanDiff): - banner_a = format_banner(self.scan_a) - banner_b = format_banner(self.scan_b) - if banner_a != banner_b: -- print >> self.f, u"-%s" % banner_a -- print >> self.f, u"+%s" % banner_b -+ print("-%s" % banner_a, file=self.f) -+ print("+%s" % banner_b, file=self.f) - elif verbose: -- print >> self.f, u" %s" % banner_a -+ print(" %s" % banner_a, file=self.f) - - def output_pre_scripts(self, pre_script_result_diffs): - 
print_script_result_diffs_text("Pre-scan script results", -@@ -595,7 +592,7 @@ class ScanDiffText(ScanDiff): - post_script_result_diffs, self.f) - - def output_host_diff(self, h_diff): -- print >> self.f -+ print(file=self.f) - h_diff.print_text(self.f) - - def output_ending(self): -@@ -620,8 +617,8 @@ class ScanDiffXML(ScanDiff): - - def output_beginning(self): - self.writer.startDocument() -- self.writer.startElement(u"nmapdiff", {u"version": NDIFF_XML_VERSION}) -- self.writer.startElement(u"scandiff", {}) -+ self.writer.startElement("nmapdiff", {"version": NDIFF_XML_VERSION}) -+ self.writer.startElement("scandiff", {}) - - if self.nmaprun_differs(): - self.writer.frag_a( -@@ -634,7 +631,7 @@ class ScanDiffXML(ScanDiff): - - def output_pre_scripts(self, pre_script_result_diffs): - if len(pre_script_result_diffs) > 0 or verbose: -- prescript_elem = self.document.createElement(u"prescript") -+ prescript_elem = self.document.createElement("prescript") - frag = script_result_diffs_to_dom_fragment( - prescript_elem, self.scan_a.pre_script_results, - self.scan_b.pre_script_results, pre_script_result_diffs, -@@ -644,7 +641,7 @@ class ScanDiffXML(ScanDiff): - - def output_post_scripts(self, post_script_result_diffs): - if len(post_script_result_diffs) > 0 or verbose: -- postscript_elem = self.document.createElement(u"postscript") -+ postscript_elem = self.document.createElement("postscript") - frag = script_result_diffs_to_dom_fragment( - postscript_elem, self.scan_a.post_script_results, - self.scan_b.post_script_results, post_script_result_diffs, -@@ -658,8 +655,8 @@ class ScanDiffXML(ScanDiff): - frag.unlink() - - def output_ending(self): -- self.writer.endElement(u"scandiff") -- self.writer.endElement(u"nmapdiff") -+ self.writer.endElement("scandiff") -+ self.writer.endElement("nmapdiff") - self.writer.endDocument() - - -@@ -717,9 +714,9 @@ class HostDiff(object): - self.cost += os_cost - - extraports_a = tuple((count, state) -- for (state, count) in self.host_a.extraports.items()) -+ for (state, count) in list(self.host_a.extraports.items())) - extraports_b = tuple((count, state) -- for (state, count) in self.host_b.extraports.items()) -+ for (state, count) in list(self.host_b.extraports.items())) - if extraports_a != extraports_b: - self.extraports_changed = True - self.cost += 1 -@@ -745,69 +742,69 @@ class HostDiff(object): - # Names and addresses. - if self.id_changed: - if host_a.state is not None: -- print >> f, u"-%s:" % host_a.format_name() -+ print("-%s:" % host_a.format_name(), file=f) - if self.host_b.state is not None: -- print >> f, u"+%s:" % host_b.format_name() -+ print("+%s:" % host_b.format_name(), file=f) - else: -- print >> f, u" %s:" % host_a.format_name() -+ print(" %s:" % host_a.format_name(), file=f) - - # State. - if self.state_changed: - if host_a.state is not None: -- print >> f, u"-Host is %s." % host_a.state -+ print("-Host is %s." % host_a.state, file=f) - if host_b.state is not None: -- print >> f, u"+Host is %s." % host_b.state -+ print("+Host is %s." % host_b.state, file=f) - elif verbose: -- print >> f, u" Host is %s." % host_b.state -+ print(" Host is %s." % host_b.state, file=f) - - # Extraports. 
- if self.extraports_changed: - if len(host_a.extraports) > 0: -- print >> f, u"-Not shown: %s" % host_a.extraports_string() -+ print("-Not shown: %s" % host_a.extraports_string(), file=f) - if len(host_b.extraports) > 0: -- print >> f, u"+Not shown: %s" % host_b.extraports_string() -+ print("+Not shown: %s" % host_b.extraports_string(), file=f) - elif verbose: - if len(host_a.extraports) > 0: -- print >> f, u" Not shown: %s" % host_a.extraports_string() -+ print(" Not shown: %s" % host_a.extraports_string(), file=f) - - # Port table. -- port_table = Table(u"** * * *") -+ port_table = Table("** * * *") - if host_a.state is None: -- mark = u"+" -+ mark = "+" - elif host_b.state is None: -- mark = u"-" -+ mark = "-" - else: -- mark = u" " -- port_table.append((mark, u"PORT", u"STATE", u"SERVICE", u"VERSION")) -+ mark = " " -+ port_table.append((mark, "PORT", "STATE", "SERVICE", "VERSION")) - - for port in self.ports: - port_diff = self.port_diffs[port] - port_diff.append_to_port_table(port_table, host_a, host_b) - - if len(port_table) > 1: -- print >> f, port_table -+ print(port_table, file=f) - - # OS changes. - if self.os_changed or verbose: - if len(host_a.os) > 0: - if len(host_b.os) > 0: -- print >> f, u" OS details:" -+ print(" OS details:", file=f) - else: -- print >> f, u"-OS details:" -+ print("-OS details:", file=f) - elif len(host_b.os) > 0: -- print >> f, u"+OS details:" -+ print("+OS details:", file=f) - # os_diffs is a list of 5-tuples returned by - # difflib.SequenceMatcher. - for op, i1, i2, j1, j2 in self.os_diffs: - if op == "replace" or op == "delete": - for i in range(i1, i2): -- print >> f, "- %s" % host_a.os[i] -+ print("- %s" % host_a.os[i], file=f) - if op == "replace" or op == "insert": - for i in range(j1, j2): -- print >> f, "+ %s" % host_b.os[i] -+ print("+ %s" % host_b.os[i], file=f) - if op == "equal": - for i in range(i1, i2): -- print >> f, " %s" % host_a.os[i] -+ print(" %s" % host_a.os[i], file=f) - - print_script_result_diffs_text("Host script results", - host_a.script_results, host_b.script_results, -@@ -818,32 +815,32 @@ class HostDiff(object): - host_b = self.host_b - - frag = document.createDocumentFragment() -- hostdiff_elem = document.createElement(u"hostdiff") -+ hostdiff_elem = document.createElement("hostdiff") - frag.appendChild(hostdiff_elem) - - if host_a.state is None or host_b.state is None: - # The host is missing in one scan. Output the whole thing. - if host_a.state is not None: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - a_elem.appendChild(host_a.to_dom_fragment(document)) - hostdiff_elem.appendChild(a_elem) - elif host_b.state is not None: -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - b_elem.appendChild(host_b.to_dom_fragment(document)) - hostdiff_elem.appendChild(b_elem) - return frag - -- host_elem = document.createElement(u"host") -+ host_elem = document.createElement("host") - - # State. 
- if host_a.state == host_b.state: - if verbose: - host_elem.appendChild(host_a.state_to_dom_fragment(document)) - else: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - a_elem.appendChild(host_a.state_to_dom_fragment(document)) - host_elem.appendChild(a_elem) -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - b_elem.appendChild(host_b.state_to_dom_fragment(document)) - host_elem.appendChild(b_elem) - -@@ -852,31 +849,31 @@ class HostDiff(object): - addrset_b = set(host_b.addresses) - for addr in sorted(addrset_a.intersection(addrset_b)): - host_elem.appendChild(addr.to_dom_fragment(document)) -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - for addr in sorted(addrset_a - addrset_b): - a_elem.appendChild(addr.to_dom_fragment(document)) - if a_elem.hasChildNodes(): - host_elem.appendChild(a_elem) -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - for addr in sorted(addrset_b - addrset_a): - b_elem.appendChild(addr.to_dom_fragment(document)) - if b_elem.hasChildNodes(): - host_elem.appendChild(b_elem) - - # Host names. -- hostnames_elem = document.createElement(u"hostnames") -+ hostnames_elem = document.createElement("hostnames") - hostnameset_a = set(host_a.hostnames) - hostnameset_b = set(host_b.hostnames) - for hostname in sorted(hostnameset_a.intersection(hostnameset_b)): - hostnames_elem.appendChild( - host_a.hostname_to_dom_fragment(document, hostname)) -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - for hostname in sorted(hostnameset_a - hostnameset_b): - a_elem.appendChild( - host_a.hostname_to_dom_fragment(document, hostname)) - if a_elem.hasChildNodes(): - hostnames_elem.appendChild(a_elem) -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - for hostname in sorted(hostnameset_b - hostnameset_a): - b_elem.appendChild( - host_b.hostname_to_dom_fragment(document, hostname)) -@@ -885,15 +882,15 @@ class HostDiff(object): - if hostnames_elem.hasChildNodes(): - host_elem.appendChild(hostnames_elem) - -- ports_elem = document.createElement(u"ports") -+ ports_elem = document.createElement("ports") - # Extraports. - if host_a.extraports == host_b.extraports: - ports_elem.appendChild(host_a.extraports_to_dom_fragment(document)) - else: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - a_elem.appendChild(host_a.extraports_to_dom_fragment(document)) - ports_elem.appendChild(a_elem) -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - b_elem.appendChild(host_b.extraports_to_dom_fragment(document)) - ports_elem.appendChild(b_elem) - # Port list. -@@ -909,18 +906,18 @@ class HostDiff(object): - - # OS changes. - if self.os_changed or verbose: -- os_elem = document.createElement(u"os") -+ os_elem = document.createElement("os") - # os_diffs is a list of 5-tuples returned by - # difflib.SequenceMatcher. 
- for op, i1, i2, j1, j2 in self.os_diffs: - if op == "replace" or op == "delete": -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - for i in range(i1, i2): - a_elem.appendChild(host_a.os_to_dom_fragment( - document, host_a.os[i])) - os_elem.appendChild(a_elem) - if op == "replace" or op == "insert": -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - for i in range(j1, j2): - b_elem.appendChild(host_b.os_to_dom_fragment( - document, host_b.os[i])) -@@ -934,7 +931,7 @@ class HostDiff(object): - - # Host script changes. - if len(self.script_result_diffs) > 0 or verbose: -- hostscript_elem = document.createElement(u"hostscript") -+ hostscript_elem = document.createElement("hostscript") - host_elem.appendChild(script_result_diffs_to_dom_fragment( - hostscript_elem, host_a.script_results, - host_b.script_results, self.script_result_diffs, -@@ -987,38 +984,38 @@ class PortDiff(object): - self.port_b.service.version_string()] - if a_columns == b_columns: - if verbose or self.script_result_diffs > 0: -- table.append([u" "] + a_columns) -+ table.append([" "] + a_columns) - else: - if not host_a.is_extraports(self.port_a.state): -- table.append([u"-"] + a_columns) -+ table.append(["-"] + a_columns) - if not host_b.is_extraports(self.port_b.state): -- table.append([u"+"] + b_columns) -+ table.append(["+"] + b_columns) - - for sr_diff in self.script_result_diffs: - sr_diff.append_to_port_table(table) - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -- portdiff_elem = document.createElement(u"portdiff") -+ portdiff_elem = document.createElement("portdiff") - frag.appendChild(portdiff_elem) - if (self.port_a.spec == self.port_b.spec and - self.port_a.state == self.port_b.state): -- port_elem = document.createElement(u"port") -- port_elem.setAttribute(u"portid", unicode(self.port_a.spec[0])) -- port_elem.setAttribute(u"protocol", self.port_a.spec[1]) -+ port_elem = document.createElement("port") -+ port_elem.setAttribute("portid", str(self.port_a.spec[0])) -+ port_elem.setAttribute("protocol", self.port_a.spec[1]) - if self.port_a.state is not None: -- state_elem = document.createElement(u"state") -- state_elem.setAttribute(u"state", self.port_a.state) -+ state_elem = document.createElement("state") -+ state_elem.setAttribute("state", self.port_a.state) - port_elem.appendChild(state_elem) - if self.port_a.service == self.port_b.service: - port_elem.appendChild( - self.port_a.service.to_dom_fragment(document)) - else: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - a_elem.appendChild( - self.port_a.service.to_dom_fragment(document)) - port_elem.appendChild(a_elem) -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - b_elem.appendChild( - self.port_b.service.to_dom_fragment(document)) - port_elem.appendChild(b_elem) -@@ -1026,10 +1023,10 @@ class PortDiff(object): - port_elem.appendChild(sr_diff.to_dom_fragment(document)) - portdiff_elem.appendChild(port_elem) - else: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - a_elem.appendChild(self.port_a.to_dom_fragment(document)) - portdiff_elem.appendChild(a_elem) -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - b_elem.appendChild(self.port_b.to_dom_fragment(document)) - portdiff_elem.appendChild(b_elem) - -@@ -1084,13 +1081,13 @@ class ScriptResultDiff(object): - for op, i1, i2, j1, j2 in diffs.get_opcodes(): - if op == "replace" or 
op == "delete": - for k in range(i1, i2): -- table.append_raw(u"-" + a_lines[k]) -+ table.append_raw("-" + a_lines[k]) - if op == "replace" or op == "insert": - for k in range(j1, j2): -- table.append_raw(u"+" + b_lines[k]) -+ table.append_raw("+" + b_lines[k]) - if op == "equal": - for k in range(i1, i2): -- table.append_raw(u" " + a_lines[k]) -+ table.append_raw(" " + a_lines[k]) - - def to_dom_fragment(self, document): - frag = document.createDocumentFragment() -@@ -1100,11 +1097,11 @@ class ScriptResultDiff(object): - frag.appendChild(self.sr_a.to_dom_fragment(document)) - else: - if self.sr_a is not None: -- a_elem = document.createElement(u"a") -+ a_elem = document.createElement("a") - a_elem.appendChild(self.sr_a.to_dom_fragment(document)) - frag.appendChild(a_elem) - if self.sr_b is not None: -- b_elem = document.createElement(u"b") -+ b_elem = document.createElement("b") - b_elem.appendChild(self.sr_b.to_dom_fragment(document)) - frag.appendChild(b_elem) - return frag -@@ -1118,7 +1115,7 @@ class Table(object): - copied to the output.""" - self.widths = [] - self.rows = [] -- self.prefix = u"" -+ self.prefix = "" - self.padding = [] - j = 0 - while j < len(template) and template[j] != "*": -@@ -1143,7 +1140,7 @@ class Table(object): - - for i in range(len(row)): - if row[i] is None: -- s = u"" -+ s = "" - else: - s = str(row[i]) - if i == len(self.widths): -@@ -1165,7 +1162,7 @@ class Table(object): - for row in self.rows: - parts = [self.prefix] - i = 0 -- if isinstance(row, basestring): -+ if isinstance(row, str): - # A raw string. - lines.append(row) - else: -@@ -1174,13 +1171,13 @@ class Table(object): - if i < len(self.padding): - parts.append(self.padding[i]) - i += 1 -- lines.append(u"".join(parts).rstrip()) -- return u"\n".join(lines) -+ lines.append("".join(parts).rstrip()) -+ return "\n".join(lines) - - - def warn(str): - """Print a warning to stderr.""" -- print >> sys.stderr, str -+ print(str, file=sys.stderr) - - - class NmapContentHandler(xml.sax.handler.ContentHandler): -@@ -1200,24 +1197,24 @@ class NmapContentHandler(xml.sax.handler - self.skip_over = False - - self._start_elem_handlers = { -- u"nmaprun": self._start_nmaprun, -- u"host": self._start_host, -- u"hosthint": self._start_hosthint, -- u"status": self._start_status, -- u"address": self._start_address, -- u"hostname": self._start_hostname, -- u"extraports": self._start_extraports, -- u"port": self._start_port, -- u"state": self._start_state, -- u"service": self._start_service, -- u"script": self._start_script, -- u"osmatch": self._start_osmatch, -- u"finished": self._start_finished, -+ "nmaprun": self._start_nmaprun, -+ "host": self._start_host, -+ "hosthint": self._start_hosthint, -+ "status": self._start_status, -+ "address": self._start_address, -+ "hostname": self._start_hostname, -+ "extraports": self._start_extraports, -+ "port": self._start_port, -+ "state": self._start_state, -+ "service": self._start_service, -+ "script": self._start_script, -+ "osmatch": self._start_osmatch, -+ "finished": self._start_finished, - } - self._end_elem_handlers = { -- u'host': self._end_host, -- u"hosthint": self._end_hosthint, -- u'port': self._end_port, -+ 'host': self._end_host, -+ "hosthint": self._end_hosthint, -+ 'port': self._end_port, - } - - def parent_element(self): -@@ -1247,72 +1244,72 @@ class NmapContentHandler(xml.sax.handler - def _start_nmaprun(self, name, attrs): - assert self.parent_element() is None - if "start" in attrs: -- start_timestamp = int(attrs.get(u"start")) -+ start_timestamp = 
int(attrs.get("start")) - self.scan.start_date = datetime.datetime.fromtimestamp( - start_timestamp) -- self.scan.scanner = attrs.get(u"scanner") -- self.scan.args = attrs.get(u"args") -- self.scan.version = attrs.get(u"version") -+ self.scan.scanner = attrs.get("scanner") -+ self.scan.args = attrs.get("args") -+ self.scan.version = attrs.get("version") - - def _start_host(self, name, attrs): -- assert self.parent_element() == u"nmaprun" -+ assert self.parent_element() == "nmaprun" - self.current_host = Host() - self.scan.hosts.append(self.current_host) - - def _start_hosthint(self, name, attrs): -- assert self.parent_element() == u"nmaprun" -+ assert self.parent_element() == "nmaprun" - self.skip_over = True - - def _start_status(self, name, attrs): -- assert self.parent_element() == u"host" -+ assert self.parent_element() == "host" - assert self.current_host is not None -- state = attrs.get(u"state") -+ state = attrs.get("state") - if state is None: -- warn(u'%s element of host %s is missing the "state" attribute; ' -- 'assuming \unknown\.' % ( -+ warn('%s element of host %s is missing the "state" attribute; ' -+ r'assuming \unknown\.' % ( - name, self.current_host.format_name())) - return - self.current_host.state = state - - def _start_address(self, name, attrs): -- assert self.parent_element() == u"host" -+ assert self.parent_element() == "host" - assert self.current_host is not None -- addr = attrs.get(u"addr") -+ addr = attrs.get("addr") - if addr is None: -- warn(u'%s element of host %s is missing the "addr" ' -+ warn('%s element of host %s is missing the "addr" ' - 'attribute; skipping.' % ( - name, self.current_host.format_name())) - return -- addrtype = attrs.get(u"addrtype", u"ipv4") -+ addrtype = attrs.get("addrtype", "ipv4") - self.current_host.add_address(Address.new(addrtype, addr)) - - def _start_hostname(self, name, attrs): -- assert self.parent_element() == u"hostnames" -+ assert self.parent_element() == "hostnames" - assert self.current_host is not None -- hostname = attrs.get(u"name") -+ hostname = attrs.get("name") - if hostname is None: -- warn(u'%s element of host %s is missing the "name" ' -+ warn('%s element of host %s is missing the "name" ' - 'attribute; skipping.' % ( - name, self.current_host.format_name())) - return - self.current_host.add_hostname(hostname) - - def _start_extraports(self, name, attrs): -- assert self.parent_element() == u"ports" -+ assert self.parent_element() == "ports" - assert self.current_host is not None -- state = attrs.get(u"state") -+ state = attrs.get("state") - if state is None: -- warn(u'%s element of host %s is missing the "state" ' -+ warn('%s element of host %s is missing the "state" ' - 'attribute; assuming "unknown".' % ( - name, self.current_host.format_name())) - state = None - if state in self.current_host.extraports: -- warn(u'Duplicate extraports state "%s" in host %s.' % ( -+ warn('Duplicate extraports state "%s" in host %s.' % ( - state, self.current_host.format_name())) - -- count = attrs.get(u"count") -+ count = attrs.get("count") - if count is None: -- warn(u'%s element of host %s is missing the "count" ' -+ warn('%s element of host %s is missing the "count" ' - 'attribute; assuming 0.' % ( - name, self.current_host.format_name())) - count = 0 -@@ -1320,99 +1317,99 @@ class NmapContentHandler(xml.sax.handler - try: - count = int(count) - except ValueError: -- warn(u"Can't convert extraports count \"%s\" " -+ warn("Can't convert extraports count \"%s\" " - "to an integer in host %s; assuming 0." 
% ( -- attrs[u"count"], self.current_host.format_name())) -+ attrs["count"], self.current_host.format_name())) - count = 0 - self.current_host.extraports[state] = count - - def _start_port(self, name, attrs): -- assert self.parent_element() == u"ports" -+ assert self.parent_element() == "ports" - assert self.current_host is not None -- portid_str = attrs.get(u"portid") -+ portid_str = attrs.get("portid") - if portid_str is None: -- warn(u'%s element of host %s missing the "portid" ' -+ warn('%s element of host %s missing the "portid" ' - 'attribute; skipping.' % ( - name, self.current_host.format_name())) - return - try: - portid = int(portid_str) - except ValueError: -- warn(u"Can't convert portid \"%s\" to an integer " -+ warn("Can't convert portid \"%s\" to an integer " - "in host %s; skipping port." % ( - portid_str, self.current_host.format_name())) - return -- protocol = attrs.get(u"protocol") -+ protocol = attrs.get("protocol") - if protocol is None: -- warn(u'%s element of host %s missing the "protocol" ' -+ warn('%s element of host %s missing the "protocol" ' - 'attribute; skipping.' % ( - name, self.current_host.format_name())) - return - self.current_port = Port((portid, protocol)) - - def _start_state(self, name, attrs): -- assert self.parent_element() == u"port" -+ assert self.parent_element() == "port" - assert self.current_host is not None - if self.current_port is None: - return - if "state" not in attrs: -- warn(u'%s element of port %s is missing the "state" ' -+ warn('%s element of port %s is missing the "state" ' - 'attribute; assuming "unknown".' % ( - name, self.current_port.spec_string())) - return -- self.current_port.state = attrs[u"state"] -+ self.current_port.state = attrs["state"] - self.current_host.add_port(self.current_port) - - def _start_service(self, name, attrs): -- assert self.parent_element() == u"port" -+ assert self.parent_element() == "port" - assert self.current_host is not None - if self.current_port is None: - return -- self.current_port.service.name = attrs.get(u"name") -- self.current_port.service.product = attrs.get(u"product") -- self.current_port.service.version = attrs.get(u"version") -- self.current_port.service.extrainfo = attrs.get(u"extrainfo") -- self.current_port.service.tunnel = attrs.get(u"tunnel") -+ self.current_port.service.name = attrs.get("name") -+ self.current_port.service.product = attrs.get("product") -+ self.current_port.service.version = attrs.get("version") -+ self.current_port.service.extrainfo = attrs.get("extrainfo") -+ self.current_port.service.tunnel = attrs.get("tunnel") - - def _start_script(self, name, attrs): - result = ScriptResult() -- result.id = attrs.get(u"id") -+ result.id = attrs.get("id") - if result.id is None: -- warn(u'%s element missing the "id" attribute; skipping.' % name) -+ warn('%s element missing the "id" attribute; skipping.' % name) - return - -- result.output = attrs.get(u"output") -+ result.output = attrs.get("output") - if result.output is None: -- warn(u'%s element missing the "output" attribute; skipping.' -+ warn('%s element missing the "output" attribute; skipping.' 
- % name) - return -- if self.parent_element() == u"prescript": -+ if self.parent_element() == "prescript": - self.scan.pre_script_results.append(result) -- elif self.parent_element() == u"postscript": -+ elif self.parent_element() == "postscript": - self.scan.post_script_results.append(result) -- elif self.parent_element() == u"hostscript": -+ elif self.parent_element() == "hostscript": - self.current_host.script_results.append(result) -- elif self.parent_element() == u"port": -+ elif self.parent_element() == "port": - self.current_port.script_results.append(result) - else: -- warn(u"%s element not inside prescript, postscript, hostscript, " -+ warn("%s element not inside prescript, postscript, hostscript, " - "or port element; ignoring." % name) - return - - def _start_osmatch(self, name, attrs): -- assert self.parent_element() == u"os" -+ assert self.parent_element() == "os" - assert self.current_host is not None - if "name" not in attrs: -- warn(u'%s element of host %s is missing the "name" ' -+ warn('%s element of host %s is missing the "name" ' - 'attribute; skipping.' % ( - name, self.current_host.format_name())) - return -- self.current_host.os.append(attrs[u"name"]) -+ self.current_host.os.append(attrs["name"]) - - def _start_finished(self, name, attrs): -- assert self.parent_element() == u"runstats" -+ assert self.parent_element() == "runstats" - if "time" in attrs: -- end_timestamp = int(attrs.get(u"time")) -+ end_timestamp = int(attrs.get("time")) - self.scan.end_date = datetime.datetime.fromtimestamp(end_timestamp) - - def _end_host(self, name): -@@ -1434,23 +1431,23 @@ class XMLWriter (xml.sax.saxutils.XMLGen - - def frag(self, frag): - for node in frag.childNodes: -- node.writexml(self.f, newl=u"\n") -+ node.writexml(self.f, newl="\n") - - def frag_a(self, frag): -- self.startElement(u"a", {}) -+ self.startElement("a", {}) - for node in frag.childNodes: -- node.writexml(self.f, newl=u"\n") -- self.endElement(u"a") -+ node.writexml(self.f, newl="\n") -+ self.endElement("a") - - def frag_b(self, frag): -- self.startElement(u"b", {}) -+ self.startElement("b", {}) - for node in frag.childNodes: -- node.writexml(self.f, newl=u"\n") -- self.endElement(u"b") -+ node.writexml(self.f, newl="\n") -+ self.endElement("b") - - - def usage(): -- print u"""\ -+ print("""\ - Usage: %s [option] FILE1 FILE2 - Compare two Nmap XML files and display a list of their differences. - Differences include host state changes, port state changes, and changes to -@@ -1460,7 +1457,7 @@ service and OS detection. - -v, --verbose also show hosts and ports that haven't changed. - --text display output in text format (default) - --xml display output in XML format\ --""" % sys.argv[0] -+""" % sys.argv[0]) - - EXIT_EQUAL = 0 - EXIT_DIFFERENT = 1 -@@ -1468,8 +1465,8 @@ EXIT_ERROR = 2 - - - def usage_error(msg): -- print >> sys.stderr, u"%s: %s" % (sys.argv[0], msg) -- print >> sys.stderr, u"Try '%s -h' for help." % sys.argv[0] -+ print("%s: %s" % (sys.argv[0], msg), file=sys.stderr) -+ print("Try '%s -h' for help." 
% sys.argv[0], file=sys.stderr) - sys.exit(EXIT_ERROR) - - -@@ -1480,7 +1477,7 @@ def main(): - try: - opts, input_filenames = getopt.gnu_getopt( - sys.argv[1:], "hv", ["help", "text", "verbose", "xml"]) -- except getopt.GetoptError, e: -+ except getopt.GetoptError as e: - usage_error(e.msg) - for o, a in opts: - if o == "-h" or o == "--help": -@@ -1490,15 +1487,15 @@ def main(): - verbose = True - elif o == "--text": - if output_format is not None and output_format != "text": -- usage_error(u"contradictory output format options.") -+ usage_error("contradictory output format options.") - output_format = "text" - elif o == "--xml": - if output_format is not None and output_format != "xml": -- usage_error(u"contradictory output format options.") -+ usage_error("contradictory output format options.") - output_format = "xml" - - if len(input_filenames) != 2: -- usage_error(u"need exactly two input filenames.") -+ usage_error("need exactly two input filenames.") - - if output_format is None: - output_format = "text" -@@ -1511,8 +1508,8 @@ def main(): - scan_a.load_from_file(filename_a) - scan_b = Scan() - scan_b.load_from_file(filename_b) -- except IOError, e: -- print >> sys.stderr, u"Can't open file: %s" % str(e) -+ except IOError as e: -+ print("Can't open file: %s" % str(e), file=sys.stderr) - sys.exit(EXIT_ERROR) - - if output_format == "text": ---- a/ndiff/ndifftest.py -+++ b/ndiff/ndifftest.py -@@ -1,4 +1,4 @@ --#!/usr/bin/env python -+#!/usr/bin/env python3 - - # Unit tests for Ndiff. - -@@ -22,7 +22,7 @@ for x in dir(ndiff): - sys.dont_write_bytecode = dont_write_bytecode - del dont_write_bytecode - --import StringIO -+import io - - - class scan_test(unittest.TestCase): -@@ -52,7 +52,7 @@ class scan_test(unittest.TestCase): - scan.load_from_file("test-scans/single.xml") - host = scan.hosts[0] - self.assertEqual(len(host.ports), 5) -- self.assertEqual(host.extraports.items(), [("filtered", 95)]) -+ self.assertEqual(list(host.extraports.items()), [("filtered", 95)]) - - def test_extraports_multi(self): - """Test that the correct number of known ports is returned when there -@@ -68,9 +68,9 @@ class scan_test(unittest.TestCase): - """Test that nmaprun information is recorded.""" - scan = Scan() - scan.load_from_file("test-scans/empty.xml") -- self.assertEqual(scan.scanner, u"nmap") -- self.assertEqual(scan.version, u"4.90RC2") -- self.assertEqual(scan.args, u"nmap -oX empty.xml -p 1-100") -+ self.assertEqual(scan.scanner, "nmap") -+ self.assertEqual(scan.version, "4.90RC2") -+ self.assertEqual(scan.args, "nmap -oX empty.xml -p 1-100") - - def test_addresses(self): - """Test that addresses are recorded.""" -@@ -84,7 +84,7 @@ class scan_test(unittest.TestCase): - scan = Scan() - scan.load_from_file("test-scans/simple.xml") - host = scan.hosts[0] -- self.assertEqual(host.hostnames, [u"scanme.nmap.org"]) -+ self.assertEqual(host.hostnames, ["scanme.nmap.org"]) - - def test_os(self): - """Test that OS information is recorded.""" -@@ -99,7 +99,7 @@ class scan_test(unittest.TestCase): - scan.load_from_file("test-scans/complex.xml") - host = scan.hosts[0] - self.assertTrue(len(host.script_results) > 0) -- self.assertTrue(len(host.ports[(22, u"tcp")].script_results) > 0) -+ self.assertTrue(len(host.ports[(22, "tcp")].script_results) > 0) - - # This test is commented out because Nmap XML doesn't store any information - # about down hosts, not even the fact that they are down. 
Recovering the list -@@ -128,16 +128,16 @@ class host_test(unittest.TestCase): - - def test_format_name(self): - h = Host() -- self.assertTrue(isinstance(h.format_name(), basestring)) -- h.add_address(IPv4Address(u"127.0.0.1")) -- self.assertTrue(u"127.0.0.1" in h.format_name()) -+ self.assertTrue(isinstance(h.format_name(), str)) -+ h.add_address(IPv4Address("127.0.0.1")) -+ self.assertTrue("127.0.0.1" in h.format_name()) - h.add_address(IPv6Address("::1")) -- self.assertTrue(u"127.0.0.1" in h.format_name()) -- self.assertTrue(u"::1" in h.format_name()) -- h.add_hostname(u"localhost") -- self.assertTrue(u"127.0.0.1" in h.format_name()) -- self.assertTrue(u"::1" in h.format_name()) -- self.assertTrue(u"localhost" in h.format_name()) -+ self.assertTrue("127.0.0.1" in h.format_name()) -+ self.assertTrue("::1" in h.format_name()) -+ h.add_hostname("localhost") -+ self.assertTrue("127.0.0.1" in h.format_name()) -+ self.assertTrue("::1" in h.format_name()) -+ self.assertTrue("localhost" in h.format_name()) - - def test_empty_get_port(self): - h = Host() -@@ -197,8 +197,8 @@ class host_test(unittest.TestCase): - h = s.hosts[0] - self.assertEqual(len(h.ports), 5) - self.assertEqual(len(h.extraports), 1) -- self.assertEqual(h.extraports.keys()[0], u"filtered") -- self.assertEqual(h.extraports.values()[0], 95) -+ self.assertEqual(list(h.extraports.keys())[0], "filtered") -+ self.assertEqual(list(h.extraports.values())[0], 95) - self.assertEqual(h.state, "up") - - -@@ -241,13 +241,13 @@ class port_test(unittest.TestCase): - """Test the Port class.""" - def test_spec_string(self): - p = Port((10, "tcp")) -- self.assertEqual(p.spec_string(), u"10/tcp") -+ self.assertEqual(p.spec_string(), "10/tcp") - p = Port((100, "ip")) -- self.assertEqual(p.spec_string(), u"100/ip") -+ self.assertEqual(p.spec_string(), "100/ip") - - def test_state_string(self): - p = Port((10, "tcp")) -- self.assertEqual(p.state_string(), u"unknown") -+ self.assertEqual(p.state_string(), "unknown") - - - class service_test(unittest.TestCase): -@@ -255,47 +255,47 @@ class service_test(unittest.TestCase): - def test_compare(self): - """Test that services with the same contents compare equal.""" - a = Service() -- a.name = u"ftp" -- a.product = u"FooBar FTP" -- a.version = u"1.1.1" -- a.tunnel = u"ssl" -+ a.name = "ftp" -+ a.product = "FooBar FTP" -+ a.version = "1.1.1" -+ a.tunnel = "ssl" - self.assertEqual(a, a) - b = Service() -- b.name = u"ftp" -- b.product = u"FooBar FTP" -- b.version = u"1.1.1" -- b.tunnel = u"ssl" -+ b.name = "ftp" -+ b.product = "FooBar FTP" -+ b.version = "1.1.1" -+ b.tunnel = "ssl" - self.assertEqual(a, b) -- b.name = u"http" -+ b.name = "http" - self.assertNotEqual(a, b) - c = Service() - self.assertNotEqual(a, c) - - def test_tunnel(self): - serv = Service() -- serv.name = u"http" -- serv.tunnel = u"ssl" -- self.assertEqual(serv.name_string(), u"ssl/http") -+ serv.name = "http" -+ serv.tunnel = "ssl" -+ self.assertEqual(serv.name_string(), "ssl/http") - - def test_version_string(self): - serv = Service() -- serv.product = u"FooBar" -+ serv.product = "FooBar" - self.assertTrue(len(serv.version_string()) > 0) - serv = Service() -- serv.version = u"1.2.3" -+ serv.version = "1.2.3" - self.assertTrue(len(serv.version_string()) > 0) - serv = Service() -- serv.extrainfo = u"misconfigured" -+ serv.extrainfo = "misconfigured" - self.assertTrue(len(serv.version_string()) > 0) - serv = Service() -- serv.product = u"FooBar" -- serv.version = u"1.2.3" -+ serv.product = "FooBar" -+ serv.version = "1.2.3" - # Must match 
Nmap output. - self.assertEqual(serv.version_string(), -- u"%s %s" % (serv.product, serv.version)) -- serv.extrainfo = u"misconfigured" -+ "%s %s" % (serv.product, serv.version)) -+ serv.extrainfo = "misconfigured" - self.assertEqual(serv.version_string(), -- u"%s %s (%s)" % (serv.product, serv.version, serv.extrainfo)) -+ "%s %s (%s)" % (serv.product, serv.version, serv.extrainfo)) - - - class ScanDiffSub(ScanDiff): -@@ -703,7 +703,7 @@ class scan_diff_xml_test(unittest.TestCa - a.load_from_file("test-scans/empty.xml") - b = Scan() - b.load_from_file("test-scans/simple.xml") -- f = StringIO.StringIO() -+ f = io.StringIO() - self.scan_diff = ScanDiffXML(a, b, f) - self.scan_diff.output() - self.xml = f.getvalue() -@@ -712,8 +712,8 @@ class scan_diff_xml_test(unittest.TestCa - def test_well_formed(self): - try: - document = xml.dom.minidom.parseString(self.xml) -- except Exception, e: -- self.fail(u"Parsing XML diff output caused the exception: %s" -+ except Exception as e: -+ self.fail("Parsing XML diff output caused the exception: %s" - % str(e)) - - -@@ -739,8 +739,8 @@ def host_apply_diff(host, diff): - host.os = diff.host_b.os[:] - - if diff.extraports_changed: -- for state in host.extraports.keys(): -- for port in host.ports.values(): -+ for state in list(host.extraports.keys()): -+ for port in list(host.ports.values()): - if port.state == state: - del host.ports[port.spec] - host.extraports = diff.host_b.extraports.copy() ---- a/ndiff/scripts/ndiff -+++ b/ndiff/scripts/ndiff -@@ -1,4 +1,4 @@ --#!/usr/bin/env python -+#!/usr/bin/env python3 - - # Ndiff - # -@@ -66,15 +66,15 @@ if INSTALL_LIB is not None and is_secure - - try: - import ndiff --except ImportError, e: -- print >> sys.stderr, """\ -+except ImportError as e: -+ print("""\ - Could not import the ndiff module: %s. --I checked in these directories:""" % repr(e.message) -+I checked in these directories:""" % repr(e), file=sys.stderr) - for dir in sys.path: -- print >> sys.stderr, " %s" % dir -- print >> sys.stderr, """\ -+ print(" %s" % dir, file=sys.stderr) -+ print("""\ - If you installed Ndiff in another directory, you may have to add the --modules directory to the PYTHONPATH environment variable.""" -+modules directory to the PYTHONPATH environment variable.""", file=sys.stderr) - sys.exit(1) - - import ndiff ---- a/ndiff/setup.py -+++ b/ndiff/setup.py -@@ -1,4 +1,4 @@ --#!/usr/bin/env python -+#!/usr/bin/env python3 - - import errno - import sys -@@ -94,7 +94,7 @@ class checked_install(distutils.command. - self.saved_prefix = sys.prefix - try: - distutils.command.install.install.finalize_options(self) -- except distutils.errors.DistutilsPlatformError, e: -+ except distutils.errors.DistutilsPlatformError as e: - raise distutils.errors.DistutilsPlatformError(str(e) + """ - Installing your distribution's python-dev package may solve this problem.""") - -@@ -152,16 +152,16 @@ Installing your distribution's python-de - self.install_scripts, "uninstall_" + APP_NAME) - - uninstaller = """\ --#!/usr/bin/env python -+#!/usr/bin/env python3 - import errno, os, os.path, sys - --print 'Uninstall %(name)s' -+print('Uninstall %(name)s') - - answer = raw_input('Are you sure that you want to uninstall ' - '%(name)s (yes/no) ') - - if answer != 'yes' and answer != 'y': -- print 'Not uninstalling.' 
-+ print('Not uninstalling.') - sys.exit(0) - - """ % {'name': APP_NAME} -@@ -177,8 +177,8 @@ if answer != 'yes' and answer != 'y': - # This should never happen (everything gets installed - # inside the root), but if it does, be safe and don't - # delete anything. -- uninstaller += ("print '%s was not installed inside " -- "the root %s; skipping.'\n" % (output, self.root)) -+ uninstaller += ("print('%s was not installed inside " -+ "the root %s; skipping.')\n" % (output, self.root)) - continue - output = path_strip_prefix(output, self.root) - assert os.path.isabs(output) -@@ -202,24 +202,24 @@ for path in INSTALLED_FILES: - dirs.append(path) - # Delete the files. - for file in files: -- print "Removing '%s'." % file -+ print("Removing '%s'." % file) - try: - os.remove(file) -- except OSError, e: -- print >> sys.stderr, ' Error: %s.' % str(e) -+ except OSError as e: -+ print(' Error: %s.' % str(e), file=sys.stderr) - # Delete the directories. First reverse-sort the normalized paths by - # length so that child directories are deleted before their parents. - dirs = [os.path.normpath(dir) for dir in dirs] - dirs.sort(key = len, reverse = True) - for dir in dirs: - try: -- print "Removing the directory '%s'." % dir -+ print("Removing the directory '%s'." % dir) - os.rmdir(dir) -- except OSError, e: -+ except OSError as e: - if e.errno == errno.ENOTEMPTY: -- print "Directory '%s' not empty; not removing." % dir -+ print("Directory '%s' not empty; not removing." % dir) - else: -- print >> sys.stderr, str(e) -+ print(str(e), file=sys.stderr) - """ - - uninstaller_file = open(uninstaller_filename, 'w') -@@ -227,7 +227,7 @@ for dir in dirs: - uninstaller_file.close() - - # Set exec bit for uninstaller -- mode = ((os.stat(uninstaller_filename)[ST_MODE]) | 0555) & 07777 -+ mode = ((os.stat(uninstaller_filename)[ST_MODE]) | 0o555) & 0o7777 - os.chmod(uninstaller_filename, mode) - - def write_installed_files(self): -@@ -241,7 +241,7 @@ for dir in dirs: - with open(INSTALLED_FILES_NAME, "w") as f: - for output in self.get_installed_files(): - assert "\n" not in output -- print >> f, output -+ print(output, file=f) - - - class my_uninstall(distutils.cmd.Command): -@@ -263,7 +263,7 @@ class my_uninstall(distutils.cmd.Command - # Read the list of installed files. - try: - f = open(INSTALLED_FILES_NAME, "r") -- except IOError, e: -+ except IOError as e: - if e.errno == errno.ENOENT: - log.error("Couldn't open the installation record '%s'. " - "Have you installed yet?" % INSTALLED_FILES_NAME) -@@ -286,7 +286,7 @@ class my_uninstall(distutils.cmd.Command - try: - if not self.dry_run: - os.remove(file) -- except OSError, e: -+ except OSError as e: - log.error(str(e)) - # Delete the directories. First reverse-sort the normalized paths by - # length so that child directories are deleted before their parents. -@@ -297,16 +297,16 @@ class my_uninstall(distutils.cmd.Command - log.info("Removing the directory '%s'." % dir) - if not self.dry_run: - os.rmdir(dir) -- except OSError, e: -+ except OSError as e: - if e.errno == errno.ENOTEMPTY: - log.info("Directory '%s' not empty; not removing." 
% dir) - else: - log.error(str(e)) - - --distutils.core.setup(name=u"ndiff", scripts=[u"scripts/ndiff"], -- py_modules=[u"ndiff"], -- data_files=[(u"share/man/man1", [u"docs/ndiff.1"])], -+distutils.core.setup(name="ndiff", scripts=["scripts/ndiff"], -+ py_modules=["ndiff"], -+ data_files=[("share/man/man1", ["docs/ndiff.1"])], - cmdclass={ - "install_egg_info": null_command, - "install": checked_install, ---- a/ndiff/test-scans/anonymize.py -+++ b/ndiff/test-scans/anonymize.py -@@ -1,4 +1,4 @@ --#!/usr/bin/env python -+#!/usr/bin/env python3 - - # Anonymize an Nmap XML file, replacing host name and IP addresses with random - # anonymous ones. Anonymized names will be consistent between runs of the -@@ -20,20 +20,20 @@ r = random.Random() - - - def hash(s): -- digest = hashlib.sha512(s).hexdigest() -+ digest = hashlib.sha512(s.encode()).hexdigest() - return int(digest, 16) - - - def anonymize_mac_address(addr): - r.seed(hash(addr)) - nums = (0, 0, 0) + tuple(r.randrange(256) for i in range(3)) -- return u":".join(u"%02X" % x for x in nums) -+ return ":".join("%02X" % x for x in nums) - - - def anonymize_ipv4_address(addr): - r.seed(hash(addr)) - nums = (10,) + tuple(r.randrange(256) for i in range(3)) -- return u".".join(unicode(x) for x in nums) -+ return ".".join(str(x) for x in nums) - - - def anonymize_ipv6_address(addr): -@@ -41,7 +41,7 @@ def anonymize_ipv6_address(addr): - # RFC 4193. - nums = (0xFD00 + r.randrange(256),) - nums = nums + tuple(r.randrange(65536) for i in range(7)) -- return u":".join("%04X" % x for x in nums) -+ return ":".join("%04X" % x for x in nums) - - # Maps to memoize address and host name conversions. - hostname_map = {} -@@ -54,11 +54,11 @@ def anonymize_hostname(name): - LETTERS = "acbdefghijklmnopqrstuvwxyz" - r.seed(hash(name)) - length = r.randrange(5, 10) -- prefix = u"".join(r.sample(LETTERS, length)) -+ prefix = "".join(r.sample(LETTERS, length)) - num = r.randrange(1000) -- hostname_map[name] = u"%s-%d.example.com" % (prefix, num) -+ hostname_map[name] = "%s-%d.example.com" % (prefix, num) - if VERBOSE: -- print >> sys.stderr, "Replace %s with %s" % (name, hostname_map[name]) -+ print("Replace %s with %s" % (name, hostname_map[name]), file=sys.stderr) - return hostname_map[name] - - mac_re = re.compile(r'\b([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}\b') -@@ -78,7 +78,7 @@ def anonymize_address(addr): - else: - assert False - if VERBOSE: -- print >> sys.stderr, "Replace %s with %s" % (addr, address_map[addr]) -+ print("Replace %s with %s" % (addr, address_map[addr]), file=sys.stderr) - return address_map[addr] - - From 8e2fc52e63ff9d3315d014784ce4015192e3a018 Mon Sep 17 00:00:00 2001 From: Christian Marangi Date: Wed, 25 Oct 2023 05:51:57 +0200 Subject: [PATCH 16/20] nmap: use git as source and bump to PCRE2 support commit Use git as source and bump version to PCRE2 support commit. Move nmap to PCRE2 library as PCRE is EOL and won't receive any security update in the future. Patch 001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch has been merged upstream and can be dropped. 
Signed-off-by: Christian Marangi --- net/nmap/Makefile | 23 ++++++++---- ...HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch | 35 ------------------- 2 files changed, 16 insertions(+), 42 deletions(-) delete mode 100644 net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch diff --git a/net/nmap/Makefile b/net/nmap/Makefile index d13dad50d9..d27e0b4093 100644 --- a/net/nmap/Makefile +++ b/net/nmap/Makefile @@ -13,13 +13,22 @@ include $(TOPDIR)/rules.mk PKG_NAME:=nmap -PKG_VERSION:=7.94 +# PKG_VERSION:=7.94 PKG_RELEASE:=1 PKG_MAINTAINER:=Nuno Gonçalves -PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 -PKG_SOURCE_URL:=https://nmap.org/dist/ -PKG_HASH:=d71be189eec43d7e099bac8571509d316c4577ca79491832ac3e1217bc8f92cc +# Restore PKG_VERSION and revert to using release as soon as NMAP publish a new +# version that supports PCRE2. +# PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 +# PKG_SOURCE_URL:=https://nmap.org/dist/ +# PKG_HASH:=d71be189eec43d7e099bac8571509d316c4577ca79491832ac3e1217bc8f92cc + +PKG_SOURCE_PROTO:=git +PKG_SOURCE_URL=https://github.com/nmap/nmap +PKG_SOURCE_DATE:=2023-08-28 +PKG_SOURCE_VERSION:=7dcea0187a9b8bccd552487de91512c97c791e3d +PKG_MIRROR_HASH:=3a086013df4759f394c93a23254689fddd2dcbb06574d4898ea276bdabdf5bff + PKG_LICENSE:=NPSL-0.94-or-NPSL-0.95 PKG_LICENSE_FILES:=LICENSE PKG_CPE_ID:=cpe:/a:nmap:nmap @@ -32,7 +41,7 @@ PYTHON3_PKG_FORCE_DISTUTILS_SETUP:=1 include $(INCLUDE_DIR)/package.mk include ../../lang/python/python3-package.mk -NMAP_DEPENDS:=+libpcap +libstdcpp +zlib +libpcre +NMAP_DEPENDS:=+libpcap +libstdcpp +zlib +libpcre2 NCAT_DEPENDS:=+libpcap NPING_DEPENDS:=+libpcap +libpthread +libstdcpp SSL_DEPENDS:=+libopenssl +ca-certs @@ -60,7 +69,7 @@ endef define Package/nmap-full $(call Package/nmap/default) - DEPENDS:=$(NMAP_DEPENDS) +libopenssl +liblua5.4 +libssh2 + DEPENDS:=$(NMAP_DEPENDS) $(SSL_DEPENDS) +liblua5.4 +libssh2 VARIANT:=full TITLE:=Nmap (with OpenSSL and scripting support) endef @@ -81,7 +90,7 @@ endef define Package/ncat-full $(call Package/nmap/default) - DEPENDS:=$(NCAT_DEPENDS) +libopenssl +liblua5.4 + DEPENDS:=$(NCAT_DEPENDS) $(SSL_DEPENDS) +liblua5.4 VARIANT:=full TITLE:=Ncat (with OpenSSL and scripting support) endef diff --git a/net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch b/net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch deleted file mode 100644 index fbac9bf024..0000000000 --- a/net/nmap/patches/001-Use-correct-HAVE_-macros-for-Lua-5.4.-Fixes-2648.patch +++ /dev/null @@ -1,35 +0,0 @@ -From b9263f056ab3acd666d25af84d399410560d48ac Mon Sep 17 00:00:00 2001 -From: dmiller -Date: Tue, 30 May 2023 18:33:07 +0000 -Subject: [PATCH] Use correct HAVE_ macros for Lua 5.4. 
Fixes #2648 - ---- - ncat/config.h.in | 4 ++-- - nmap_config.h.in | 4 ++-- - 2 files changed, 4 insertions(+), 4 deletions(-) - ---- a/ncat/config.h.in -+++ b/ncat/config.h.in -@@ -191,7 +191,7 @@ - - /* Defines for locating Lua */ - #undef LUA_INCLUDED --#undef HAVE_LUA5_3_LUA_H --#undef HAVE_LUA_5_3_LUA_H -+#undef HAVE_LUA5_4_LUA_H -+#undef HAVE_LUA_5_4_LUA_H - #undef HAVE_LUA_H - #undef HAVE_LUA_LUA_H ---- a/nmap_config.h.in -+++ b/nmap_config.h.in -@@ -172,8 +172,8 @@ extern "C" int gethostname (char *, unsi - #undef HAVE_PCAP_SET_IMMEDIATE_MODE - - /* Various possibilities for lua.h */ --#undef HAVE_LUA5_3_LUA_H --#undef HAVE_LUA_5_3_LUA_H -+#undef HAVE_LUA5_4_LUA_H -+#undef HAVE_LUA_5_4_LUA_H - #undef HAVE_LUA_H - #undef HAVE_LUA_LUA_H - From 951779447ab227c073dfb8cf1229b494c356c071 Mon Sep 17 00:00:00 2001 From: Josef Schlehofer Date: Sat, 27 Apr 2024 12:19:45 +0200 Subject: [PATCH 17/20] nmap: update to version 7.95 - Remove patch 010-Build-based-on-OpenSSL-version.patch since it was backported and now it is included in 7.95 release - Patch 030-ncat-drop-ca-bundle.patch was refreshed Release notes: https://nmap.org/changelog.html#7.95 Signed-off-by: Josef Schlehofer --- net/nmap/Makefile | 16 +- .../010-Build-based-on-OpenSSL-version.patch | 295 ------------------ .../patches/030-ncat-drop-ca-bundle.patch | 4 +- 3 files changed, 6 insertions(+), 309 deletions(-) delete mode 100644 net/nmap/patches/010-Build-based-on-OpenSSL-version.patch diff --git a/net/nmap/Makefile b/net/nmap/Makefile index d27e0b4093..c93214cf2a 100644 --- a/net/nmap/Makefile +++ b/net/nmap/Makefile @@ -13,21 +13,13 @@ include $(TOPDIR)/rules.mk PKG_NAME:=nmap -# PKG_VERSION:=7.94 +PKG_VERSION:=7.95 PKG_RELEASE:=1 PKG_MAINTAINER:=Nuno Gonçalves -# Restore PKG_VERSION and revert to using release as soon as NMAP publish a new -# version that supports PCRE2. -# PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 -# PKG_SOURCE_URL:=https://nmap.org/dist/ -# PKG_HASH:=d71be189eec43d7e099bac8571509d316c4577ca79491832ac3e1217bc8f92cc - -PKG_SOURCE_PROTO:=git -PKG_SOURCE_URL=https://github.com/nmap/nmap -PKG_SOURCE_DATE:=2023-08-28 -PKG_SOURCE_VERSION:=7dcea0187a9b8bccd552487de91512c97c791e3d -PKG_MIRROR_HASH:=3a086013df4759f394c93a23254689fddd2dcbb06574d4898ea276bdabdf5bff +PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.bz2 +PKG_SOURCE_URL:=https://nmap.org/dist/ +PKG_HASH:=e14ab530e47b5afd88f1c8a2bac7f89cd8fe6b478e22d255c5b9bddb7a1c5778 PKG_LICENSE:=NPSL-0.94-or-NPSL-0.95 PKG_LICENSE_FILES:=LICENSE diff --git a/net/nmap/patches/010-Build-based-on-OpenSSL-version.patch b/net/nmap/patches/010-Build-based-on-OpenSSL-version.patch deleted file mode 100644 index 3a615dfb60..0000000000 --- a/net/nmap/patches/010-Build-based-on-OpenSSL-version.patch +++ /dev/null @@ -1,295 +0,0 @@ -From d6bea8dcdee36a3902cece14097993350306f1b6 Mon Sep 17 00:00:00 2001 -From: dmiller -Date: Tue, 6 Sep 2022 22:39:34 +0000 -Subject: [PATCH] Build based on OpenSSL version, not API level. 
Fixes #2516 - ---- - ncat/http_digest.c | 2 +- - ncat/ncat_connect.c | 4 ++-- - ncat/ncat_ssl.c | 6 +++--- - ncat/ncat_ssl.h | 12 ------------ - ncat/test/test-wildcard.c | 4 ++-- - nse_openssl.cc | 28 +++++++--------------------- - nse_ssl_cert.cc | 24 ++++++------------------ - nsock/src/nsock_ssl.c | 4 ++-- - nsock/src/nsock_ssl.h | 15 +-------------- - 9 files changed, 24 insertions(+), 75 deletions(-) - ---- a/ncat/http_digest.c -+++ b/ncat/http_digest.c -@@ -133,7 +133,7 @@ int http_digest_init_secret(void) - return 0; - } - --#if OPENSSL_API_LEVEL < 10100 -+#if OPENSSL_VERSION_NUMBER < 0x10100000L - #define EVP_MD_CTX_new EVP_MD_CTX_create - #define EVP_MD_CTX_free EVP_MD_CTX_destroy - #endif ---- a/ncat/ncat_connect.c -+++ b/ncat/ncat_connect.c -@@ -82,8 +82,8 @@ - #include - - /* Deprecated in OpenSSL 3.0 */ --#if OPENSSL_API_LEVEL >= 30000 --#define SSL_get_peer_certificate SSL_get1_peer_certificate -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L -+# define SSL_get_peer_certificate SSL_get1_peer_certificate - #endif - #endif - ---- a/ncat/ncat_ssl.c -+++ b/ncat/ncat_ssl.c -@@ -80,7 +80,7 @@ - #define FUNC_ASN1_STRING_data ASN1_STRING_data - #endif - --#if OPENSSL_API_LEVEL >= 30000 -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L - #include - /* Deprecated in OpenSSL 3.0 */ - #define SSL_get_peer_certificate SSL_get1_peer_certificate -@@ -117,7 +117,7 @@ SSL_CTX *setup_ssl_listen(void) - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - SSL_load_error_strings(); --#elif OPENSSL_API_LEVEL >= 30000 -+#elif OPENSSL_VERSION_NUMBER >= 0x30000000L - if (NULL == OSSL_PROVIDER_load(NULL, "legacy")) - { - loguser("OpenSSL legacy provider failed to load.\n"); -@@ -477,7 +477,7 @@ static int ssl_gen_cert(X509 **cert, EVP - const char *commonName = "localhost"; - char dNSName[128]; - int rc; --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - int ret = 0; - RSA *rsa = NULL; - BIGNUM *bne = NULL; ---- a/ncat/ncat_ssl.h -+++ b/ncat/ncat_ssl.h -@@ -67,18 +67,6 @@ - #include - #include - --/* OPENSSL_API_LEVEL per OpenSSL 3.0: decimal MMmmpp */ --#ifndef OPENSSL_API_LEVEL --# if OPENSSL_API_COMPAT < 0x900000L --# define OPENSSL_API_LEVEL (OPENSSL_API_COMPAT) --# else --# define OPENSSL_API_LEVEL \ -- (((OPENSSL_API_COMPAT >> 28) & 0xF) * 10000 \ -- + ((OPENSSL_API_COMPAT >> 20) & 0xFF) * 100 \ -- + ((OPENSSL_API_COMPAT >> 12) & 0xFF)) --# endif --#endif -- - #define NCAT_CA_CERTS_FILE "ca-bundle.crt" - - enum { ---- a/ncat/test/test-wildcard.c -+++ b/ncat/test/test-wildcard.c -@@ -20,7 +20,7 @@ are rejected. 
The SSL transactions happe - - #include "ncat_core.h" - #include "ncat_ssl.h" --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - #include - #endif - -@@ -294,7 +294,7 @@ stack_err: - static int gen_cert(X509 **cert, EVP_PKEY **key, - const struct lstr commonNames[], const struct lstr dNSNames[]) - { --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - int rc, ret=0; - RSA *rsa = NULL; - BIGNUM *bne = NULL; ---- a/nse_openssl.cc -+++ b/nse_openssl.cc -@@ -20,6 +20,9 @@ - #define FUNC_EVP_CIPHER_CTX_init EVP_CIPHER_CTX_reset - #define FUNC_EVP_CIPHER_CTX_cleanup EVP_CIPHER_CTX_reset - #define PASS_EVP_CTX(ctx) (ctx) -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L -+# include -+#endif - #else - #define FUNC_EVP_MD_CTX_init EVP_MD_CTX_init - #define FUNC_EVP_MD_CTX_cleanup EVP_MD_CTX_cleanup -@@ -37,23 +40,6 @@ extern NmapOps o; - - #include "nse_openssl.h" - --/* OPENSSL_API_LEVEL per OpenSSL 3.0: decimal MMmmpp */ --#ifndef OPENSSL_API_LEVEL --# if OPENSSL_API_COMPAT < 0x900000L --# define OPENSSL_API_LEVEL (OPENSSL_API_COMPAT) --# else --# define OPENSSL_API_LEVEL \ -- (((OPENSSL_API_COMPAT >> 28) & 0xF) * 10000 \ -- + ((OPENSSL_API_COMPAT >> 20) & 0xFF) * 100 \ -- + ((OPENSSL_API_COMPAT >> 12) & 0xFF)) --# endif --#endif -- -- --#if OPENSSL_API_LEVEL >= 30000 --#include --#endif -- - #define NSE_SSL_LUA_ERR(_L) \ - luaL_error(_L, "OpenSSL error: %s", ERR_error_string(ERR_get_error(), NULL)) - -@@ -184,7 +170,7 @@ static int l_bignum_is_prime( lua_State - bignum_data_t * p = (bignum_data_t *) luaL_checkudata( L, 1, "BIGNUM" ); - BN_CTX * ctx = BN_CTX_new(); - int is_prime = --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - BN_is_prime_ex( p->bn, BN_prime_checks, ctx, NULL ); - #else - BN_check_prime( p->bn, ctx, NULL ); -@@ -199,7 +185,7 @@ static int l_bignum_is_safe_prime( lua_S - bignum_data_t * p = (bignum_data_t *) luaL_checkudata( L, 1, "BIGNUM" ); - BN_CTX * ctx = BN_CTX_new(); - int is_prime = --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - BN_is_prime_ex( p->bn, BN_prime_checks, ctx, NULL ); - #else - BN_check_prime( p->bn, ctx, NULL ); -@@ -210,7 +196,7 @@ static int l_bignum_is_safe_prime( lua_S - BN_sub_word( n, (BN_ULONG)1 ); - BN_div_word( n, (BN_ULONG)2 ); - is_safe = --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - BN_is_prime_ex( n, BN_prime_checks, ctx, NULL ); - #else - BN_check_prime( n, ctx, NULL ); -@@ -582,7 +568,7 @@ LUALIB_API int luaopen_openssl(lua_State - #if OPENSSL_VERSION_NUMBER < 0x10100000L || defined LIBRESSL_VERSION_NUMBER - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); --#elif OPENSSL_API_LEVEL >= 30000 -+#elif OPENSSL_VERSION_NUMBER >= 0x30000000L - if (NULL == OSSL_PROVIDER_load(NULL, "legacy") && o.debugging > 1) - { - // Legacy provider may not be available. 
---- a/nse_ssl_cert.cc -+++ b/nse_ssl_cert.cc -@@ -89,19 +89,7 @@ - #define X509_get0_notAfter X509_get_notAfter - #endif - --/* OPENSSL_API_LEVEL per OpenSSL 3.0: decimal MMmmpp */ --#ifndef OPENSSL_API_LEVEL --# if OPENSSL_API_COMPAT < 0x900000L --# define OPENSSL_API_LEVEL (OPENSSL_API_COMPAT) --# else --# define OPENSSL_API_LEVEL \ -- (((OPENSSL_API_COMPAT >> 28) & 0xF) * 10000 \ -- + ((OPENSSL_API_COMPAT >> 20) & 0xFF) * 100 \ -- + ((OPENSSL_API_COMPAT >> 12) & 0xFF)) --# endif --#endif -- --#if OPENSSL_API_LEVEL >= 30000 -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L - #include - /* Deprecated in OpenSSL 3.0 */ - #define SSL_get_peer_certificate SSL_get1_peer_certificate -@@ -459,7 +447,7 @@ static const char *pkey_type_to_string(i - } - - int lua_push_ecdhparams(lua_State *L, EVP_PKEY *pubkey) { --#if OPENSSL_API_LEVEL >= 30000 -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L - char tmp[64] = {0}; - size_t len = 0; - /* This structure (ecdhparams.curve_params) comes from tls.lua */ -@@ -634,7 +622,7 @@ static int parse_ssl_cert(lua_State *L, - else - #endif - if (pkey_type == EVP_PKEY_RSA) { --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - RSA *rsa = EVP_PKEY_get1_RSA(pubkey); - if (rsa) { - #endif -@@ -643,7 +631,7 @@ static int parse_ssl_cert(lua_State *L, - luaL_getmetatable( L, "BIGNUM" ); - lua_setmetatable( L, -2 ); - #if HAVE_OPAQUE_STRUCTS --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - const BIGNUM *n = NULL, *e = NULL; - data->should_free = false; - RSA_get0_key(rsa, &n, &e, NULL); -@@ -663,7 +651,7 @@ static int parse_ssl_cert(lua_State *L, - luaL_getmetatable( L, "BIGNUM" ); - lua_setmetatable( L, -2 ); - #if HAVE_OPAQUE_STRUCTS --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - data->should_free = false; - #else - data->should_free = true; -@@ -673,7 +661,7 @@ static int parse_ssl_cert(lua_State *L, - data->bn = rsa->n; - #endif - lua_setfield(L, -2, "modulus"); --#if OPENSSL_API_LEVEL < 30000 -+#if OPENSSL_VERSION_NUMBER < 0x30000000L - RSA_free(rsa); - } - #endif ---- a/nsock/src/nsock_ssl.c -+++ b/nsock/src/nsock_ssl.c -@@ -64,7 +64,7 @@ - #include "netutils.h" - - #if HAVE_OPENSSL --#if OPENSSL_API_LEVEL >= 30000 -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L - #include - #endif - -@@ -120,7 +120,7 @@ static SSL_CTX *ssl_init_helper(const SS - SSL_library_init(); - #else - OPENSSL_atexit(nsock_ssl_atexit); --#if OPENSSL_API_LEVEL >= 30000 -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L - if (NULL == OSSL_PROVIDER_load(NULL, "legacy")) - { - nsock_log_error("OpenSSL legacy provider failed to load.\n"); ---- a/nsock/src/nsock_ssl.h -+++ b/nsock/src/nsock_ssl.h -@@ -69,20 +69,7 @@ - #include - #include - --/* OPENSSL_API_LEVEL per OpenSSL 3.0: decimal MMmmpp */ --#ifndef OPENSSL_API_LEVEL --# if OPENSSL_API_COMPAT < 0x900000L --# define OPENSSL_API_LEVEL (OPENSSL_API_COMPAT) --# else --# define OPENSSL_API_LEVEL \ -- (((OPENSSL_API_COMPAT >> 28) & 0xF) * 10000 \ -- + ((OPENSSL_API_COMPAT >> 20) & 0xFF) * 100 \ -- + ((OPENSSL_API_COMPAT >> 12) & 0xFF)) --# endif --#endif -- -- --#if OPENSSL_API_LEVEL >= 30000 -+#if OPENSSL_VERSION_NUMBER >= 0x30000000L - /* Deprecated in OpenSSL 3.0 */ - #define SSL_get_peer_certificate SSL_get1_peer_certificate - #endif diff --git a/net/nmap/patches/030-ncat-drop-ca-bundle.patch b/net/nmap/patches/030-ncat-drop-ca-bundle.patch index 0112c03f09..d7c75f2ec0 100644 --- a/net/nmap/patches/030-ncat-drop-ca-bundle.patch +++ b/net/nmap/patches/030-ncat-drop-ca-bundle.patch @@ 
-23,7 +23,7 @@ Also remove references to NCAT_CA_CERTS_FILE and NCAT_CA_CERTS_PATH in order to
 ifneq ($(NOLUA),yes)
 --- a/ncat/ncat_posix.c
 +++ b/ncat/ncat_posix.c
-@@ -347,28 +347,17 @@ void set_lf_mode(void)
+@@ -357,28 +357,17 @@ void set_lf_mode(void)
 
 #ifdef HAVE_OPENSSL
 
@@ -55,7 +55,7 @@ Also remove references to NCAT_CA_CERTS_FILE and NCAT_CA_CERTS_PATH in order to
 #endif
 --- a/ncat/ncat_ssl.h
 +++ b/ncat/ncat_ssl.h
-@@ -67,8 +67,6 @@
+@@ -66,8 +66,6 @@
 #include
 #include
 

From 6b1a859678b857a9ab4683e2e2696a71a09b940c Mon Sep 17 00:00:00 2001
From: Christian Marangi
Date: Sun, 28 Apr 2024 12:33:19 +0200
Subject: [PATCH 18/20] nmap: add patch fixing compilation error with no OpenSSL DTLS

Add patch fixing compilation error with no OpenSSL DTLS support.

Signed-off-by: Christian Marangi
---
 ...mpilation-error-with-OPENSSL_NO_DTLS.patch | 67 +++++++++++++++++++
 1 file changed, 67 insertions(+)
 create mode 100644 net/nmap/patches/100-nsock-Fix-compilation-error-with-OPENSSL_NO_DTLS.patch

diff --git a/net/nmap/patches/100-nsock-Fix-compilation-error-with-OPENSSL_NO_DTLS.patch b/net/nmap/patches/100-nsock-Fix-compilation-error-with-OPENSSL_NO_DTLS.patch
new file mode 100644
index 0000000000..6f4e6f1b25
--- /dev/null
+++ b/net/nmap/patches/100-nsock-Fix-compilation-error-with-OPENSSL_NO_DTLS.patch
@@ -0,0 +1,67 @@
+From 707812db69cc5fdb8b6b4417d3a6b18405116d9f Mon Sep 17 00:00:00 2001
+From: Christian Marangi
+Date: Sun, 28 Apr 2024 12:00:02 +0200
+Subject: [PATCH] nsock: Fix compilation error with OPENSSL_NO_DTLS
+
+Commit ba26cc78f207 ("Replace check for DTLS_client_method with
+OPENSSL_NO_DTLS") made DTLS support depend on the openssl define
+directly but leave some use of dtlsctx not guarded by ifdef.
+
+Fix this by adding to the remaining use of dtlsctx ifdef guard and
+return fatal print for running function with unsupported OpenSSL
+feature.
+
+Fixes: ba26cc78f207 ("Replace check for DTLS_client_method with OPENSSL_NO_DTLS")
+Signed-off-by: Christian Marangi
+---
+ nsock/src/nsock_connect.c | 4 ++++
+ nsock/src/nsock_core.c | 9 ++++++++-
+ nsock/src/nsock_pool.c | 2 ++
+ 3 files changed, 14 insertions(+), 1 deletion(-)
+
+--- a/nsock/src/nsock_connect.c
++++ b/nsock/src/nsock_connect.c
+@@ -472,8 +472,12 @@ nsock_event_id nsock_connect_ssl(nsock_p
+ 
+   if (proto == IPPROTO_UDP)
+   {
++#ifndef OPENSSL_NO_DTLS
+     if (!ms->dtlsctx)
+       nsock_pool_dtls_init(ms, 0);
++#else
++    fatal("%s called with no OpenSSL DTLS support", __func__);
++#endif
+   }
+   else
+   {
+--- a/nsock/src/nsock_core.c
++++ b/nsock/src/nsock_core.c
+@@ -364,7 +364,14 @@ void handle_connect_result(struct npool
+   if (nse->type == NSE_TYPE_CONNECT_SSL &&
+       nse->status == NSE_STATUS_SUCCESS) {
+ #if HAVE_OPENSSL
+-    sslctx = iod->lastproto == IPPROTO_UDP ? ms->dtlsctx : ms->sslctx;
++    if (iod->lastproto == IPPROTO_UDP)
++#ifndef OPENSSL_NO_DTLS
++      sslctx = ms->dtlsctx;
++#else
++      fatal("%s called with no OpenSSL DTLS support", __func__);
++#endif
++    else
++      sslctx = ms->sslctx;
+     assert(sslctx != NULL);
+     /* Reuse iod->ssl if present. If set, this is the second try at connection
+        without the SSL_OP_NO_SSLv2 option set. */
+--- a/nsock/src/nsock_pool.c
++++ b/nsock/src/nsock_pool.c
+@@ -178,8 +178,10 @@ nsock_pool nsock_pool_new(void *userdata
+ 
+ #if HAVE_OPENSSL
+   nsp->sslctx = NULL;
++#ifndef OPENSSL_NO_DTLS
+   nsp->dtlsctx = NULL;
+ #endif
++#endif
+ 
+   nsp->px_chain = NULL;
+ 

From 269b1a0b6d68d2b66a55ae6cf9cb8007291ecb96 Mon Sep 17 00:00:00 2001
From: Paul Spooren
Date: Thu, 21 Mar 2024 00:03:43 +0100
Subject: [PATCH 19/20] apk: move package to core

This will become part of openwrt.git and used within the build system.

Signed-off-by: Paul Spooren
---
 utils/apk/Makefile | 75 ----------
 ...vel@lists.alpinelinux.org-4a6a0840.rsa.pub | 9 --
 ...vel@lists.alpinelinux.org-5243ef4b.rsa.pub | 9 --
 ...vel@lists.alpinelinux.org-5261cecb.rsa.pub | 9 --
 utils/apk/files/alpine-repositories | 3 -
 .../patches/0001-remove-doc-generation.patch | 21 ---
 .../apk/patches/010-openssl-deprecated.patch | 131 ------------------
 utils/apk/test.sh | 7 -
 8 files changed, 264 deletions(-)
 delete mode 100644 utils/apk/Makefile
 delete mode 100644 utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub
 delete mode 100644 utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub
 delete mode 100644 utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub
 delete mode 100644 utils/apk/files/alpine-repositories
 delete mode 100644 utils/apk/patches/0001-remove-doc-generation.patch
 delete mode 100644 utils/apk/patches/010-openssl-deprecated.patch
 delete mode 100644 utils/apk/test.sh

diff --git a/utils/apk/Makefile b/utils/apk/Makefile
deleted file mode 100644
index 56e473c5b5..0000000000
--- a/utils/apk/Makefile
+++ /dev/null
@@ -1,75 +0,0 @@
-include $(TOPDIR)/rules.mk
-
-PKG_NAME:=apk
-PKG_VERSION:=2.14.0
-PKG_RELEASE:=1
-
-PKG_SOURCE:=apk-tools-v$(PKG_VERSION).tar.gz
-PKG_SOURCE_URL:=https://gitlab.alpinelinux.org/alpine/apk-tools/-/archive/v$(PKG_VERSION)
-PKG_HASH:=4c6db13039280814a10a3b3d89b29837b59769c69214a1861997e088eac107a5
-PKG_BUILD_DIR:=$(BUILD_DIR)/apk-tools-v$(PKG_VERSION)
-
-PKG_MAINTAINER:=Paul Spooren
-PKG_LICENSE:=GPL-2.0-only
-PKG_LICENSE_FILES:=LICENSE
-
-PKG_INSTALL:=1
-PKG_BUILD_PARALLEL:=1
-
-include $(INCLUDE_DIR)/package.mk
-
-define Package/apk
-  SECTION:=utils
-  CATEGORY:=Utilities
-  TITLE:=apk package manager
-  DEPENDS:=+zlib +libopenssl @!arc
-  URL:=$(PKG_SOURCE_URL)
-endef
-
-define Package/alpine-keys
-  SECTION:=utils
-  CATEGORY:=Utilities
-  TITLE:=Alpine apk public signing keys
-  DEPENDS:=apk
-endef
-
-define Package/alpine-repositories
-  SECTION:=utils
-  CATEGORY:=Utilities
-  TITLE:=Official Alpine repositories
-  DEPENDS:=apk
-endef
-
-MAKE_FLAGS += LUA=no
-
-define Package/apk/install
-	$(INSTALL_DIR) $(1)/lib/apk/db
-
-	$(INSTALL_DIR) $(1)/bin
-	$(INSTALL_BIN) $(PKG_INSTALL_DIR)/sbin/apk $(1)/bin/apk
-
-	$(INSTALL_DIR) $(1)/usr/lib
-	$(INSTALL_DATA) $(PKG_INSTALL_DIR)/lib/* $(1)/usr/lib/
-
-	$(INSTALL_DIR) $(1)/usr/lib/pkgconfig
-	$(INSTALL_DATA) $(PKG_INSTALL_DIR)/usr/lib/pkgconfig/apk.pc \
-		$(1)/usr/lib/pkgconfig/
-
-	$(INSTALL_DIR) $(1)/etc/apk/
-	echo $(ARCH) > $(1)/etc/apk/arch
-	touch $(1)/etc/apk/world
-endef
-
-define Package/alpine-keys/install
-	$(INSTALL_DIR) $(1)/etc/apk/keys
-	$(INSTALL_DATA) ./files/alpine-keys/* $(1)/etc/apk/keys
-endef
-
-define Package/alpine-repositories/install
-	$(INSTALL_DIR) $(1)/etc/apk/keys
-	$(INSTALL_DATA) ./files/alpine-repositories $(1)/etc/apk/repositories
-endef
-
-$(eval $(call BuildPackage,apk))
-$(eval $(call BuildPackage,alpine-keys))
-$(eval $(call BuildPackage,alpine-repositories))
diff --git a/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub b/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub deleted file mode 100644 index bb4bdc80fd..0000000000 --- a/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-4a6a0840.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe -qxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O -Q0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA -jixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R -L5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo -GuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B -ywIDAQAB ------END PUBLIC KEY----- diff --git a/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub b/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub deleted file mode 100644 index 6cbfad7441..0000000000 --- a/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5243ef4b.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+ -mTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy -DO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K -aA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G -mnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0 -sS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg -cQIDAQAB ------END PUBLIC KEY----- diff --git a/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub b/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub deleted file mode 100644 index 83f0658e9c..0000000000 --- a/utils/apk/files/alpine-keys/alpine-devel@lists.alpinelinux.org-5261cecb.rsa.pub +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN PUBLIC KEY----- -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0 -cGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX -yHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j -g01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB -Ca1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY -sWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw -wwIDAQAB ------END PUBLIC KEY----- diff --git a/utils/apk/files/alpine-repositories b/utils/apk/files/alpine-repositories deleted file mode 100644 index 5babbb23b4..0000000000 --- a/utils/apk/files/alpine-repositories +++ /dev/null @@ -1,3 +0,0 @@ -https://dl-cdn.alpinelinux.org/alpine/edge/main -https://dl-cdn.alpinelinux.org/alpine/edge/community - diff --git a/utils/apk/patches/0001-remove-doc-generation.patch b/utils/apk/patches/0001-remove-doc-generation.patch deleted file mode 100644 index dee05c56f2..0000000000 --- a/utils/apk/patches/0001-remove-doc-generation.patch +++ /dev/null @@ -1,21 +0,0 @@ -From b05a93c48fdbb50f0c464310dc2ce45777d32ea2 Mon Sep 17 00:00:00 2001 -From: Paul Spooren -Date: Fri, 2 Oct 2020 14:08:52 -1000 -Subject: [PATCH] remove doc generation - -Signed-off-by: Paul Spooren ---- - Makefile | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - ---- a/Makefile -+++ b/Makefile -@@ -25,7 +25,7 @@ export DESTDIR SBINDIR LIBDIR CONFDIR MA - ## - # Top-level subdirs - --subdirs := libfetch/ src/ doc/ -+subdirs := libfetch/ src/ - - ## - # Include all rules and stuff diff --git a/utils/apk/patches/010-openssl-deprecated.patch 
b/utils/apk/patches/010-openssl-deprecated.patch deleted file mode 100644 index bdfafb6712..0000000000 --- a/utils/apk/patches/010-openssl-deprecated.patch +++ /dev/null @@ -1,131 +0,0 @@ -From c4c8aa5ba0ec6bf4c6d74c4807b66edfbd91be7c Mon Sep 17 00:00:00 2001 -From: Rosen Penev -Date: Mon, 11 Jan 2021 01:51:58 -0800 -Subject: [PATCH] fix compilation without deprecated OpenSSL APIs - -(De)initialization is deprecated under OpenSSL 1.0 and above. - -[TT: Some simplifications, and additional edits.] - -Signed-off-by: Rosen Penev ---- - libfetch/common.c | 12 ++++-------- - src/apk.c | 26 +------------------------- - src/apk_openssl.h | 27 +++++++++++++++++++++++++++ - 3 files changed, 32 insertions(+), 33 deletions(-) - ---- a/libfetch/common.c -+++ b/libfetch/common.c -@@ -583,15 +583,11 @@ static int fetch_ssl_setup_client_certif - int - fetch_ssl(conn_t *conn, const struct url *URL, int verbose) - { -- /* Init the SSL library and context */ -- if (!SSL_library_init()){ -- fprintf(stderr, "SSL library init failed\n"); -- return (-1); -- } -- -- SSL_load_error_strings(); -- -+#if OPENSSL_VERSION_NUMBER < 0x10100000L - conn->ssl_meth = SSLv23_client_method(); -+#else -+ conn->ssl_meth = TLS_client_method(); -+#endif - conn->ssl_ctx = SSL_CTX_new(conn->ssl_meth); - SSL_CTX_set_mode(conn->ssl_ctx, SSL_MODE_AUTO_RETRY); - ---- a/src/apk.c -+++ b/src/apk.c -@@ -20,11 +20,6 @@ - #include - #include - --#include --#ifndef OPENSSL_NO_ENGINE --#include --#endif -- - #include - - #include "apk_defines.h" -@@ -423,25 +418,6 @@ static int parse_options(int argc, char - return 0; - } - --static void fini_openssl(void) --{ -- EVP_cleanup(); --#ifndef OPENSSL_NO_ENGINE -- ENGINE_cleanup(); --#endif -- CRYPTO_cleanup_all_ex_data(); --} -- --static void init_openssl(void) --{ -- atexit(fini_openssl); -- OpenSSL_add_all_algorithms(); --#ifndef OPENSSL_NO_ENGINE -- ENGINE_load_builtin_engines(); -- ENGINE_register_all_complete(); --#endif --} -- - static void on_sigwinch(int s) - { - apk_reset_screen_width(); -@@ -534,7 +510,7 @@ int main(int argc, char **argv) - apk_force |= applet->forced_force; - } - -- init_openssl(); -+ apk_openssl_init(); - setup_automatic_flags(); - fetchTimeout = 60; - fetchRedirectMethod = fetch_redirect; ---- a/src/apk_openssl.h -+++ b/src/apk_openssl.h -@@ -11,7 +11,11 @@ - #define APK_SSL_COMPAT_H - - #include -+#include - #include -+#ifndef OPENSSL_NO_ENGINE -+#include -+#endif - - #if OPENSSL_VERSION_NUMBER < 0x1010000fL || (defined(LIBRESSL_VERSION_NUMBER) && LIBRESSL_VERSION_NUMBER < 0x2070000fL) - -@@ -25,6 +29,29 @@ static inline void EVP_MD_CTX_free(EVP_M - return EVP_MD_CTX_destroy(mdctx); - } - -+static inline void apk_openssl_cleanup(void) -+{ -+ EVP_cleanup(); -+#ifndef OPENSSL_NO_ENGINE -+ ENGINE_cleanup(); -+#endif -+ CRYPTO_cleanup_all_ex_data(); -+} -+ -+static inline void apk_openssl_init(void) -+{ -+ atexit(apk_openssl_cleanup); -+ OpenSSL_add_all_algorithms(); -+#ifndef OPENSSL_NO_ENGINE -+ ENGINE_load_builtin_engines(); -+ ENGINE_register_all_complete(); -+#endif -+} -+ -+#else -+ -+static inline void apk_openssl_init(void) {} -+ - #endif - - #endif diff --git a/utils/apk/test.sh b/utils/apk/test.sh deleted file mode 100644 index 234cf5a799..0000000000 --- a/utils/apk/test.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - -case "$1" in - "apk") - apk --version | grep "${2#*v}" - ;; -esac From 3fefdbf34bbe2601fcd677fd887e4156214b37ac Mon Sep 17 00:00:00 2001 From: brvphoenix Date: Mon, 29 Apr 2024 17:08:50 +0800 Subject: [PATCH 20/20] sing-box: update to 1.8.12 

Signed-off-by: brvphoenix
---
 net/sing-box/Makefile | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/net/sing-box/Makefile b/net/sing-box/Makefile
index 27bea3116e..62b9d67a89 100644
--- a/net/sing-box/Makefile
+++ b/net/sing-box/Makefile
@@ -1,12 +1,12 @@
 include $(TOPDIR)/rules.mk
 
 PKG_NAME:=sing-box
-PKG_VERSION:=1.8.10
+PKG_VERSION:=1.8.12
 PKG_RELEASE:=1
 
 PKG_SOURCE:=$(PKG_NAME)-$(PKG_VERSION).tar.gz
 PKG_SOURCE_URL:=https://codeload.github.com/SagerNet/sing-box/tar.gz/v$(PKG_VERSION)?
-PKG_HASH:=a959f9a40148ed4166b8161072672f3ce1532957adef7717132c7277bb96dcf6
+PKG_HASH:=802eb5e202ac1dd846b1f529b3df9e5d69452182fd5d70f7c8f2a819c9e86162
 
 PKG_LICENSE:=GPL-3.0-or-later
 PKG_LICENSE_FILES:=LICENSE