Diffstat (limited to 'gnu/packages/patches')
-rw-r--r--  gnu/packages/patches/connman-CVE-2022-32292.patch              |   34
-rw-r--r--  gnu/packages/patches/connman-CVE-2022-32293-pt1.patch          |  140
-rw-r--r--  gnu/packages/patches/connman-CVE-2022-32293-pt2.patch          |  173
-rw-r--r--  gnu/packages/patches/connman-add-missing-libppp-compat.h.patch |  140
-rw-r--r--  gnu/packages/patches/converseen-hide-updates-checks.patch      |   24
-rw-r--r--  gnu/packages/patches/ergodox-firmware-fix-json-target.patch    | 1405
-rw-r--r--  gnu/packages/patches/ergodox-firmware-fix-numpad.patch         |   18
-rw-r--r--  gnu/packages/patches/glibc-2.35-CVE-2023-4911.patch            |  160
-rw-r--r--  gnu/packages/patches/icecat-compare-paths.patch                |   21
-rw-r--r--  gnu/packages/patches/icecat-use-system-wide-dir.patch          |   36
-rw-r--r--  gnu/packages/patches/qmk-firmware-fix-hacker-dvorak.patch      |   15
-rw-r--r--  gnu/packages/patches/qtbase-5-use-TZDIR.patch                  |   39
-rw-r--r--  gnu/packages/patches/qtbase-use-TZDIR.patch                    |  162
-rw-r--r--  gnu/packages/patches/rust-1.70-fix-rustix-build.patch          |   21
-rw-r--r--  gnu/packages/patches/rust-openssl-sys-no-vendor.patch          |   32
-rw-r--r--  gnu/packages/patches/teuchos-remove-duplicate-using.patch      |   34
-rw-r--r--  gnu/packages/patches/tootle-glib-object-naming.patch           |   66
-rw-r--r--  gnu/packages/patches/tootle-reason-phrase.patch                |   48
-rw-r--r--  gnu/packages/patches/tpetra-remove-duplicate-using.patch       |   18
-rw-r--r--  gnu/packages/patches/u-boot-patman-change-id.patch             |  232
-rw-r--r--  gnu/packages/patches/unison-fix-ocaml-4.08.patch               |   81
21 files changed, 2283 insertions(+), 616 deletions(-)
diff --git a/gnu/packages/patches/connman-CVE-2022-32292.patch b/gnu/packages/patches/connman-CVE-2022-32292.patch
deleted file mode 100644
index cbe30742e1..0000000000
--- a/gnu/packages/patches/connman-CVE-2022-32292.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-https://git.kernel.org/pub/scm/network/connman/connman.git/patch/?id=d1a5ede5d255bde8ef707f8441b997563b9312bd
-
-From d1a5ede5d255bde8ef707f8441b997563b9312bd Mon Sep 17 00:00:00 2001
-From: Nathan Crandall <ncrandall@tesla.com>
-Date: Tue, 12 Jul 2022 08:56:34 +0200
-Subject: gweb: Fix OOB write in received_data()
-
-There is a mismatch of handling binary vs. C-string data with memchr
-and strlen, resulting in pos, count, and bytes_read to become out of
-sync and result in a heap overflow. Instead, do not treat the buffer
-as an ASCII C-string. We calculate the count based on the return value
-of memchr, instead of strlen.
-
-Fixes: CVE-2022-32292
----
- gweb/gweb.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/gweb/gweb.c b/gweb/gweb.c
-index 12fcb1d8..13c6c5f2 100644
---- a/gweb/gweb.c
-+++ b/gweb/gweb.c
-@@ -918,7 +918,7 @@ static gboolean received_data(GIOChannel *channel, GIOCondition cond,
- }
-
- *pos = '\0';
-- count = strlen((char *) ptr);
-+ count = pos - ptr;
- if (count > 0 && ptr[count - 1] == '\r') {
- ptr[--count] = '\0';
- bytes_read--;
---
-cgit
-
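The deleted commit message above describes the fix: the buffer holds raw network data, so the line length must come from the memchr() result (a pointer difference) rather than strlen(), which stops at the first embedded NUL byte and lets pos, count and bytes_read drift apart. A stand-alone sketch of that idea, using hypothetical names rather than the actual connman code:

#include <stdio.h>
#include <string.h>

/* Count the bytes of one header line in a buffer that may contain
 * arbitrary binary data.  Deriving the count from memchr() keeps it
 * consistent with the bytes actually scanned; strlen() would stop at
 * the first NUL byte and under-report, which is the mismatch the
 * deleted patch fixed in received_data().  Illustrative sketch only. */
static size_t header_line_length(const unsigned char *buf, size_t bytes_read)
{
    const unsigned char *eol = memchr(buf, '\n', bytes_read);

    if (!eol)
        return bytes_read;          /* no terminator yet: whole buffer */
    return (size_t)(eol - buf);     /* pointer difference, not strlen() */
}

int main(void)
{
    /* A "line" with an embedded NUL before the newline. */
    const unsigned char buf[] = { 'H', 'T', 'T', 'P', '\0', 'X', '\r', '\n' };

    printf("memchr-based count: %zu\n", header_line_length(buf, sizeof buf)); /* 7 */
    printf("strlen-based count: %zu\n", strlen((const char *)buf));           /* 4 */
    return 0;
}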
diff --git a/gnu/packages/patches/connman-CVE-2022-32293-pt1.patch b/gnu/packages/patches/connman-CVE-2022-32293-pt1.patch
deleted file mode 100644
index c4d1aec31b..0000000000
--- a/gnu/packages/patches/connman-CVE-2022-32293-pt1.patch
+++ /dev/null
@@ -1,140 +0,0 @@
-https://git.kernel.org/pub/scm/network/connman/connman.git/patch/?id=72343929836de80727a27d6744c869dff045757c
-
-From 72343929836de80727a27d6744c869dff045757c Mon Sep 17 00:00:00 2001
-From: Daniel Wagner <wagi@monom.org>
-Date: Tue, 5 Jul 2022 08:32:12 +0200
-Subject: wispr: Add reference counter to portal context
-
-Track the connman_wispr_portal_context live time via a
-refcounter. This only adds the infrastructure to do proper reference
-counting.
-
-Fixes: CVE-2022-32293
----
- src/wispr.c | 52 ++++++++++++++++++++++++++++++++++++++++++----------
- 1 file changed, 42 insertions(+), 10 deletions(-)
-
-diff --git a/src/wispr.c b/src/wispr.c
-index a07896ca..bde7e63b 100644
---- a/src/wispr.c
-+++ b/src/wispr.c
-@@ -56,6 +56,7 @@ struct wispr_route {
- };
-
- struct connman_wispr_portal_context {
-+ int refcount;
- struct connman_service *service;
- enum connman_ipconfig_type type;
- struct connman_wispr_portal *wispr_portal;
-@@ -97,6 +98,11 @@ static char *online_check_ipv4_url = NULL;
- static char *online_check_ipv6_url = NULL;
- static bool enable_online_to_ready_transition = false;
-
-+#define wispr_portal_context_ref(wp_context) \
-+ wispr_portal_context_ref_debug(wp_context, __FILE__, __LINE__, __func__)
-+#define wispr_portal_context_unref(wp_context) \
-+ wispr_portal_context_unref_debug(wp_context, __FILE__, __LINE__, __func__)
-+
- static void connman_wispr_message_init(struct connman_wispr_message *msg)
- {
- DBG("");
-@@ -162,9 +168,6 @@ static void free_connman_wispr_portal_context(
- {
- DBG("context %p", wp_context);
-
-- if (!wp_context)
-- return;
--
- if (wp_context->wispr_portal) {
- if (wp_context->wispr_portal->ipv4_context == wp_context)
- wp_context->wispr_portal->ipv4_context = NULL;
-@@ -201,9 +204,38 @@ static void free_connman_wispr_portal_context(
- g_free(wp_context);
- }
-
-+static struct connman_wispr_portal_context *
-+wispr_portal_context_ref_debug(struct connman_wispr_portal_context *wp_context,
-+ const char *file, int line, const char *caller)
-+{
-+ DBG("%p ref %d by %s:%d:%s()", wp_context,
-+ wp_context->refcount + 1, file, line, caller);
-+
-+ __sync_fetch_and_add(&wp_context->refcount, 1);
-+
-+ return wp_context;
-+}
-+
-+static void wispr_portal_context_unref_debug(
-+ struct connman_wispr_portal_context *wp_context,
-+ const char *file, int line, const char *caller)
-+{
-+ if (!wp_context)
-+ return;
-+
-+ DBG("%p ref %d by %s:%d:%s()", wp_context,
-+ wp_context->refcount - 1, file, line, caller);
-+
-+ if (__sync_fetch_and_sub(&wp_context->refcount, 1) != 1)
-+ return;
-+
-+ free_connman_wispr_portal_context(wp_context);
-+}
-+
- static struct connman_wispr_portal_context *create_wispr_portal_context(void)
- {
-- return g_try_new0(struct connman_wispr_portal_context, 1);
-+ return wispr_portal_context_ref(
-+ g_new0(struct connman_wispr_portal_context, 1));
- }
-
- static void free_connman_wispr_portal(gpointer data)
-@@ -215,8 +247,8 @@ static void free_connman_wispr_portal(gpointer data)
- if (!wispr_portal)
- return;
-
-- free_connman_wispr_portal_context(wispr_portal->ipv4_context);
-- free_connman_wispr_portal_context(wispr_portal->ipv6_context);
-+ wispr_portal_context_unref(wispr_portal->ipv4_context);
-+ wispr_portal_context_unref(wispr_portal->ipv6_context);
-
- g_free(wispr_portal);
- }
-@@ -452,7 +484,7 @@ static void portal_manage_status(GWebResult *result,
- connman_info("Client-Timezone: %s", str);
-
- if (!enable_online_to_ready_transition)
-- free_connman_wispr_portal_context(wp_context);
-+ wispr_portal_context_unref(wp_context);
-
- __connman_service_ipconfig_indicate_state(service,
- CONNMAN_SERVICE_STATE_ONLINE, type);
-@@ -616,7 +648,7 @@ static void wispr_portal_request_wispr_login(struct connman_service *service,
- return;
- }
-
-- free_connman_wispr_portal_context(wp_context);
-+ wispr_portal_context_unref(wp_context);
- return;
- }
-
-@@ -952,7 +984,7 @@ static int wispr_portal_detect(struct connman_wispr_portal_context *wp_context)
-
- if (wp_context->token == 0) {
- err = -EINVAL;
-- free_connman_wispr_portal_context(wp_context);
-+ wispr_portal_context_unref(wp_context);
- }
- } else if (wp_context->timeout == 0) {
- wp_context->timeout = g_idle_add(no_proxy_callback, wp_context);
-@@ -1001,7 +1033,7 @@ int __connman_wispr_start(struct connman_service *service,
-
- /* If there is already an existing context, we wipe it */
- if (wp_context)
-- free_connman_wispr_portal_context(wp_context);
-+ wispr_portal_context_unref(wp_context);
-
- wp_context = create_wispr_portal_context();
- if (!wp_context)
---
-cgit
-
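The infrastructure added by this (since removed) patch is a conventional reference-counting scheme: objects are created holding one reference, ref/unref use the GCC atomic builtins, the last unref frees the object, and unref ignores NULL. A minimal self-contained sketch of that pattern, with hypothetical names rather than connman's:

#include <stdlib.h>

/* Illustrative only: mirrors the ref/unref discipline the patch adds to
 * struct connman_wispr_portal_context, with made-up names. */
struct portal_context {
    int refcount;
    char *status_url;
};

static struct portal_context *portal_context_ref(struct portal_context *ctx)
{
    __sync_fetch_and_add(&ctx->refcount, 1);
    return ctx;
}

static void portal_context_unref(struct portal_context *ctx)
{
    if (!ctx)
        return;
    /* Only the caller dropping the last reference frees the object. */
    if (__sync_fetch_and_sub(&ctx->refcount, 1) != 1)
        return;
    free(ctx->status_url);
    free(ctx);
}

static struct portal_context *portal_context_new(void)
{
    struct portal_context *ctx = calloc(1, sizeof *ctx);

    if (!ctx)
        return NULL;
    /* Objects are born holding one reference, as in
     * create_wispr_portal_context() above. */
    return portal_context_ref(ctx);
}

int main(void)
{
    struct portal_context *ctx = portal_context_new();

    if (!ctx)
        return 1;
    portal_context_ref(ctx);    /* second owner, e.g. a pending request */
    portal_context_unref(ctx);  /* first owner done; object still alive */
    portal_context_unref(ctx);  /* last reference gone; object is freed */
    portal_context_unref(NULL); /* NULL is tolerated, as in the patch   */
    return 0;
}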
diff --git a/gnu/packages/patches/connman-CVE-2022-32293-pt2.patch b/gnu/packages/patches/connman-CVE-2022-32293-pt2.patch
deleted file mode 100644
index 556e69e397..0000000000
--- a/gnu/packages/patches/connman-CVE-2022-32293-pt2.patch
+++ /dev/null
@@ -1,173 +0,0 @@
-https://git.kernel.org/pub/scm/network/connman/connman.git/patch/?id=416bfaff988882c553c672e5bfc2d4f648d29e8a
-
-From 416bfaff988882c553c672e5bfc2d4f648d29e8a Mon Sep 17 00:00:00 2001
-From: Daniel Wagner <wagi@monom.org>
-Date: Tue, 5 Jul 2022 09:11:09 +0200
-Subject: wispr: Update portal context references
-
-Maintain proper portal context references to avoid UAF.
-
-Fixes: CVE-2022-32293
----
- src/wispr.c | 34 ++++++++++++++++++++++------------
- 1 file changed, 22 insertions(+), 12 deletions(-)
-
-diff --git a/src/wispr.c b/src/wispr.c
-index bde7e63b..84bed33f 100644
---- a/src/wispr.c
-+++ b/src/wispr.c
-@@ -105,8 +105,6 @@ static bool enable_online_to_ready_transition = false;
-
- static void connman_wispr_message_init(struct connman_wispr_message *msg)
- {
-- DBG("");
--
- msg->has_error = false;
- msg->current_element = NULL;
-
-@@ -166,8 +164,6 @@ static void free_wispr_routes(struct connman_wispr_portal_context *wp_context)
- static void free_connman_wispr_portal_context(
- struct connman_wispr_portal_context *wp_context)
- {
-- DBG("context %p", wp_context);
--
- if (wp_context->wispr_portal) {
- if (wp_context->wispr_portal->ipv4_context == wp_context)
- wp_context->wispr_portal->ipv4_context = NULL;
-@@ -483,9 +479,6 @@ static void portal_manage_status(GWebResult *result,
- &str))
- connman_info("Client-Timezone: %s", str);
-
-- if (!enable_online_to_ready_transition)
-- wispr_portal_context_unref(wp_context);
--
- __connman_service_ipconfig_indicate_state(service,
- CONNMAN_SERVICE_STATE_ONLINE, type);
-
-@@ -546,14 +539,17 @@ static void wispr_portal_request_portal(
- {
- DBG("");
-
-+ wispr_portal_context_ref(wp_context);
- wp_context->request_id = g_web_request_get(wp_context->web,
- wp_context->status_url,
- wispr_portal_web_result,
- wispr_route_request,
- wp_context);
-
-- if (wp_context->request_id == 0)
-+ if (wp_context->request_id == 0) {
- wispr_portal_error(wp_context);
-+ wispr_portal_context_unref(wp_context);
-+ }
- }
-
- static bool wispr_input(const guint8 **data, gsize *length,
-@@ -618,13 +614,15 @@ static void wispr_portal_browser_reply_cb(struct connman_service *service,
- return;
-
- if (!authentication_done) {
-- wispr_portal_error(wp_context);
- free_wispr_routes(wp_context);
-+ wispr_portal_error(wp_context);
-+ wispr_portal_context_unref(wp_context);
- return;
- }
-
- /* Restarting the test */
- __connman_service_wispr_start(service, wp_context->type);
-+ wispr_portal_context_unref(wp_context);
- }
-
- static void wispr_portal_request_wispr_login(struct connman_service *service,
-@@ -700,11 +698,13 @@ static bool wispr_manage_message(GWebResult *result,
-
- wp_context->wispr_result = CONNMAN_WISPR_RESULT_LOGIN;
-
-+ wispr_portal_context_ref(wp_context);
- if (__connman_agent_request_login_input(wp_context->service,
- wispr_portal_request_wispr_login,
-- wp_context) != -EINPROGRESS)
-+ wp_context) != -EINPROGRESS) {
- wispr_portal_error(wp_context);
-- else
-+ wispr_portal_context_unref(wp_context);
-+ } else
- return true;
-
- break;
-@@ -753,6 +753,7 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
- if (length > 0) {
- g_web_parser_feed_data(wp_context->wispr_parser,
- chunk, length);
-+ wispr_portal_context_unref(wp_context);
- return true;
- }
-
-@@ -770,6 +771,7 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
-
- switch (status) {
- case 000:
-+ wispr_portal_context_ref(wp_context);
- __connman_agent_request_browser(wp_context->service,
- wispr_portal_browser_reply_cb,
- wp_context->status_url, wp_context);
-@@ -781,11 +783,14 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
- if (g_web_result_get_header(result, "X-ConnMan-Status",
- &str)) {
- portal_manage_status(result, wp_context);
-+ wispr_portal_context_unref(wp_context);
- return false;
-- } else
-+ } else {
-+ wispr_portal_context_ref(wp_context);
- __connman_agent_request_browser(wp_context->service,
- wispr_portal_browser_reply_cb,
- wp_context->redirect_url, wp_context);
-+ }
-
- break;
- case 300:
-@@ -798,6 +803,7 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
- !g_web_result_get_header(result, "Location",
- &redirect)) {
-
-+ wispr_portal_context_ref(wp_context);
- __connman_agent_request_browser(wp_context->service,
- wispr_portal_browser_reply_cb,
- wp_context->status_url, wp_context);
-@@ -808,6 +814,7 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
-
- wp_context->redirect_url = g_strdup(redirect);
-
-+ wispr_portal_context_ref(wp_context);
- wp_context->request_id = g_web_request_get(wp_context->web,
- redirect, wispr_portal_web_result,
- wispr_route_request, wp_context);
-@@ -820,6 +827,7 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
-
- break;
- case 505:
-+ wispr_portal_context_ref(wp_context);
- __connman_agent_request_browser(wp_context->service,
- wispr_portal_browser_reply_cb,
- wp_context->status_url, wp_context);
-@@ -832,6 +840,7 @@ static bool wispr_portal_web_result(GWebResult *result, gpointer user_data)
- wp_context->request_id = 0;
- done:
- wp_context->wispr_msg.message_type = -1;
-+ wispr_portal_context_unref(wp_context);
- return false;
- }
-
-@@ -890,6 +899,7 @@ static void proxy_callback(const char *proxy, void *user_data)
- xml_wispr_parser_callback, wp_context);
-
- wispr_portal_request_portal(wp_context);
-+ wispr_portal_context_unref(wp_context);
- }
-
- static gboolean no_proxy_callback(gpointer user_data)
---
-cgit
-
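Part two of the fix, shown above, balances those references across asynchronous boundaries: a reference is taken just before each web request or agent callback is registered and dropped either in the callback itself or on the immediate-failure path, so the context cannot be freed while a reply is still pending. A rough stand-alone illustration of that discipline, with a hypothetical API rather than connman's:

#include <stdio.h>
#include <stdlib.h>

struct context {
    int refcount;
};

static struct context *ctx_ref(struct context *c)
{
    c->refcount++;
    return c;
}

static void ctx_unref(struct context *c)
{
    if (c && --c->refcount == 0)
        free(c);
}

typedef void (*reply_cb)(void *user_data);

/* Stand-in for g_web_request_get(): remembers the callback for later. */
static reply_cb pending_cb;
static void *pending_data;

static unsigned request_start(reply_cb cb, void *user_data, int fail)
{
    if (fail)
        return 0;               /* request could not be queued */
    pending_cb = cb;
    pending_data = user_data;
    return 1;
}

static void on_reply(void *user_data)
{
    struct context *c = user_data;

    printf("reply handled, refcount=%d\n", c->refcount);
    ctx_unref(c);               /* drop the reference taken for this callback */
}

int main(void)
{
    struct context *c = calloc(1, sizeof *c);

    if (!c)
        return 1;
    ctx_ref(c);                 /* the caller's own reference */

    ctx_ref(c);                 /* reference owned by the pending request */
    if (request_start(on_reply, c, /*fail=*/0) == 0)
        ctx_unref(c);           /* request never started: drop it again */

    if (pending_cb)
        pending_cb(pending_data);   /* simulate the asynchronous reply */

    ctx_unref(c);               /* caller is done with the context */
    return 0;
}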
diff --git a/gnu/packages/patches/connman-add-missing-libppp-compat.h.patch b/gnu/packages/patches/connman-add-missing-libppp-compat.h.patch
new file mode 100644
index 0000000000..4432c161fd
--- /dev/null
+++ b/gnu/packages/patches/connman-add-missing-libppp-compat.h.patch
@@ -0,0 +1,140 @@
+From: Tobias Geerinckx-Rice <me@tobias.gr>
+Date: Sun Sep 24 02:00:00 2023 +0200
+Subject: connman: Add missing libppp-compat.h.
+
+This file was taken verbatim from upstream commit
+a48864a2e5d2a725dfc6eef567108bc13b43857f. Its absence was since
+fixed in upstream commit 09c1bbfd55647b18b5bbbb99ef2b6d902465ca16.
+
+---
+diff -Naur a/scripts/libppp-compat.h b/scripts/libppp-compat.h
+--- a/scripts/libppp-compat.h 1970-01-01 01:00:00.000000000 +0100
++++ b/scripts/libppp-compat.h 2023-09-29 00:45:18.726709659 +0200
+@@ -0,0 +1,127 @@
++/* Copyright (C) Eivind Naess, eivnaes@yahoo.com */
++/* SPDX-License-Identifier: GPL-2.0-or-later */
++
++#ifndef __LIBPPP_COMPAT_H__
++#define __LIBPPP_COMPAT_H__
++
++/* Define USE_EAPTLS compile with EAP TLS support against older pppd headers,
++ * pppd >= 2.5.0 use PPP_WITH_EAPTLS and is defined in pppdconf.h */
++#define USE_EAPTLS 1
++
++/* Define INET6 to compile with IPv6 support against older pppd headers,
++ * pppd >= 2.5.0 use PPP_WITH_IPV6CP and is defined in pppdconf.h */
++#define INET6 1
++
++/* PPP < 2.5.0 defines and exports VERSION which overlaps with current package VERSION define.
++ * this silly macro magic is to work around that. */
++#undef VERSION
++#include <pppd/pppd.h>
++
++#ifndef PPPD_VERSION
++#define PPPD_VERSION VERSION
++#endif
++
++#include <pppd/fsm.h>
++#include <pppd/ccp.h>
++#include <pppd/eui64.h>
++#include <pppd/ipcp.h>
++#include <pppd/ipv6cp.h>
++#include <pppd/eap.h>
++#include <pppd/upap.h>
++
++#ifdef HAVE_PPPD_CHAP_H
++#include <pppd/chap.h>
++#endif
++
++#ifdef HAVE_PPPD_CHAP_NEW_H
++#include <pppd/chap-new.h>
++#endif
++
++#ifdef HAVE_PPPD_CHAP_MS_H
++#include <pppd/chap_ms.h>
++#endif
++
++#ifndef PPP_PROTO_CHAP
++#define PPP_PROTO_CHAP 0xc223
++#endif
++
++#ifndef PPP_PROTO_EAP
++#define PPP_PROTO_EAP 0xc227
++#endif
++
++
++#if WITH_PPP_VERSION < PPP_VERSION(2,5,0)
++
++static inline bool
++debug_on (void)
++{
++ return debug;
++}
++
++static inline const char
++*ppp_ipparam (void)
++{
++ return ipparam;
++}
++
++static inline int
++ppp_ifunit (void)
++{
++ return ifunit;
++}
++
++static inline const char *
++ppp_ifname (void)
++{
++ return ifname;
++}
++
++static inline int
++ppp_get_mtu (int idx)
++{
++ return netif_get_mtu(idx);
++}
++
++typedef enum ppp_notify
++{
++ NF_PID_CHANGE,
++ NF_PHASE_CHANGE,
++ NF_EXIT,
++ NF_SIGNALED,
++ NF_IP_UP,
++ NF_IP_DOWN,
++ NF_IPV6_UP,
++ NF_IPV6_DOWN,
++ NF_AUTH_UP,
++ NF_LINK_DOWN,
++ NF_FORK,
++ NF_MAX_NOTIFY
++} ppp_notify_t;
++
++typedef void (ppp_notify_fn) (void *ctx, int arg);
++
++static inline void
++ppp_add_notify (ppp_notify_t type, ppp_notify_fn *func, void *ctx)
++{
++ struct notifier **list[NF_MAX_NOTIFY] = {
++ [NF_PID_CHANGE ] = &pidchange,
++ [NF_PHASE_CHANGE] = &phasechange,
++ [NF_EXIT ] = &exitnotify,
++ [NF_SIGNALED ] = &sigreceived,
++ [NF_IP_UP ] = &ip_up_notifier,
++ [NF_IP_DOWN ] = &ip_down_notifier,
++ [NF_IPV6_UP ] = &ipv6_up_notifier,
++ [NF_IPV6_DOWN ] = &ipv6_down_notifier,
++ [NF_AUTH_UP ] = &auth_up_notifier,
++ [NF_LINK_DOWN ] = &link_down_notifier,
++ [NF_FORK ] = &fork_notifier,
++ };
++
++ struct notifier **notify = list[type];
++ if (notify) {
++ add_notifier(notify, func, ctx);
++ }
++}
++
++#endif /* #if WITH_PPP_VERSION < PPP_VERSION(2,5,0) */
++#endif /* #if__LIBPPP_COMPAT_H__ */
diff --git a/gnu/packages/patches/converseen-hide-updates-checks.patch b/gnu/packages/patches/converseen-hide-updates-checks.patch
index c4c80b5bec..6d3aabdb1a 100644
--- a/gnu/packages/patches/converseen-hide-updates-checks.patch
+++ b/gnu/packages/patches/converseen-hide-updates-checks.patch
@@ -43,17 +43,17 @@ index 704e75c..bb3f9b0 100755
int idx = comboLangs->findText(t.currentLanguage(), Qt::MatchExactly);
diff --git a/src/mainwindowimpl.cpp b/src/mainwindowimpl.cpp
-index 2b6d68d..83a365f 100755
+index 6562e35..d19bc28 100755
--- a/src/mainwindowimpl.cpp
+++ b/src/mainwindowimpl.cpp
-@@ -164,7 +164,6 @@ void MainWindowImpl::createActions()
+@@ -163,7 +163,6 @@ void MainWindowImpl::createActions()
connect(actionInfo, SIGNAL(triggered()), this, SLOT(about()));
connect(actionDonatePaypal, SIGNAL(triggered()), this, SLOT(openPaypalLink()));
connect(actionReportBug, SIGNAL(triggered()), this, SLOT(bugReport()));
- connect(actionCheckForUpdates, SIGNAL(triggered()), this, SLOT(checkForUpdates()));
- }
+ connect(actionHelp, SIGNAL(triggered()), this, SLOT(onlineHelp()));
- void MainWindowImpl::setupMenu()
+ // Create first toolbar button
diff --git a/ui/dialogoptions.ui b/ui/dialogoptions.ui
index e59148d..6cb23f5 100755
--- a/ui/dialogoptions.ui
@@ -82,18 +82,18 @@ index e59148d..6cb23f5 100755
<layout class="QHBoxLayout" name="horizontalLayout_2">
<item>
diff --git a/ui/mainwindow.ui b/ui/mainwindow.ui
-index a5f55f0..28df8cc 100755
+index 569565f..b86fc2e 100755
--- a/ui/mainwindow.ui
+++ b/ui/mainwindow.ui
-@@ -190,7 +190,6 @@
- </property>
+@@ -356,7 +356,6 @@
+ <addaction name="separator"/>
<addaction name="actionDonatePaypal"/>
<addaction name="separator"/>
- <addaction name="actionCheckForUpdates"/>
<addaction name="actionReportBug"/>
<addaction name="separator"/>
- </widget>
-@@ -1365,14 +1364,6 @@ p, li { white-space: pre-wrap; }
+ <addaction name="actionInfo"/>
+@@ -1412,14 +1411,6 @@ p, li { white-space: pre-wrap; }
<string>Import windows icon (*.ico *.icon)</string>
</property>
</action>
@@ -105,6 +105,6 @@ index a5f55f0..28df8cc 100755
- <string>Check if a new version of Converseen is available</string>
- </property>
- </action>
- </widget>
- <customwidgets>
- <customwidget>
+ <action name="actionAddDirectory">
+ <property name="icon">
+ <iconset resource="../resources.qrc">
diff --git a/gnu/packages/patches/ergodox-firmware-fix-json-target.patch b/gnu/packages/patches/ergodox-firmware-fix-json-target.patch
new file mode 100644
index 0000000000..52da4e2497
--- /dev/null
+++ b/gnu/packages/patches/ergodox-firmware-fix-json-target.patch
@@ -0,0 +1,1405 @@
+Submitted upstream:
+<https://github.com/benblazak/ergodox-firmware/pull/99>
+<https://github.com/benblazak/ergodox-firmware/pull/98>
+
+diff --git a/build-scripts/gen-layout.py b/build-scripts/gen-layout.py
+index fd5e54c..251a463 100755
+--- a/build-scripts/gen-layout.py
++++ b/build-scripts/gen-layout.py
+@@ -22,8 +22,10 @@ import sys
+
+ # -----------------------------------------------------------------------------
+
+-class Namespace():
+- pass
++
++class Namespace:
++ pass
++
+
+ template = Namespace()
+ doc = Namespace()
+@@ -31,45 +33,45 @@ info = Namespace()
+
+ # -----------------------------------------------------------------------------
+
++
+ def main():
+- arg_parser = argparse.ArgumentParser(
+- description = "Generate a picture of the firmware's "
+- + "keyboard layout" )
++ arg_parser = argparse.ArgumentParser(
++ description="Generate a picture of the firmware's " + "keyboard layout"
++ )
+
+- arg_parser.add_argument(
+- '--ui-info-file',
+- required = True )
++ arg_parser.add_argument("--ui-info-file", required=True)
+
+- args = arg_parser.parse_args(sys.argv[1:])
++ args = arg_parser.parse_args(sys.argv[1:])
+
+- # constant file paths
+- args.template_svg_file = './build-scripts/gen_layout/template.svg'
+- args.template_js_file = './build-scripts/gen_layout/template.js'
++ # constant file paths
++ args.template_svg_file = "./build-scripts/gen_layout/template.svg"
++ args.template_js_file = "./build-scripts/gen_layout/template.js"
+
+- # normalize paths
+- args.ui_info_file = os.path.abspath(args.ui_info_file)
+- args.template_svg_file = os.path.abspath(args.template_svg_file)
+- args.template_js_file = os.path.abspath(args.template_js_file)
++ # normalize paths
++ args.ui_info_file = os.path.abspath(args.ui_info_file)
++ args.template_svg_file = os.path.abspath(args.template_svg_file)
++ args.template_js_file = os.path.abspath(args.template_js_file)
+
+- # set vars
+- doc.main = '' # to store the html document we're generating
+- template.svg = open(args.template_svg_file).read()
+- template.js = open(args.template_js_file).read()
+- info.all = json.loads(open(args.ui_info_file).read())
++ # set vars
++ doc.main = "" # to store the html document we're generating
++ template.svg = open(args.template_svg_file).read()
++ template.js = open(args.template_js_file).read()
++ info.all = json.loads(open(args.ui_info_file).read())
+
+- info.matrix_positions = info.all['mappings']['matrix-positions']
+- info.matrix_layout = info.all['mappings']['matrix-layout']
++ info.matrix_positions = info.all["mappings"]["matrix-positions"]
++ info.matrix_layout = info.all["mappings"]["matrix-layout"]
+
+- # prefix
+- doc.prefix = ("""
++ # prefix
++ doc.prefix = (
++ """
+ <?xml version="1.0" encoding="UTF-8" standalone="no"?>
+ <html>
+
+ <head>
+ <script>
+ """
+-+ template.js +
+-""" </script>
++ + template.js
++ + """ </script>
+ </head>
+
+ <body>
+@@ -78,9 +80,13 @@ def main():
+
+ <ul>
+ <li>git commit date:
+- <code>""" + info.all['miscellaneous']['git-commit-date'] + """</code></li>
++ <code>"""
++ + info.all["miscellaneous"]["git-commit-date"]
++ + """</code></li>
+ <li>git commit id:
+- <code>""" + info.all['miscellaneous']['git-commit-id'] + """</code></li>
++ <code>"""
++ + info.all["miscellaneous"]["git-commit-id"]
++ + """</code></li>
+ </ul>
+
+ <h2>Notes</h2>
+@@ -123,301 +129,293 @@ def main():
+
+ <br>
+
+-""")[1:-1]
++"""
++ )[1:-1]
+
+- # suffix
+- doc.suffix = ("""
++ # suffix
++ doc.suffix = (
++ """
+ </body>
+ </html>
+
+-""")[1:-1]
+-
+- # substitute into template
+- # -------
+- # note: this is not general enough to handle any possible layout well, at
+- # the moment. but it should handle more standard ones well. (hopefully
+- # minor) modifications may be necessary on a case by case basis
+- # -------
+- layer_number = -1
+- for (layout, layer) in zip( info.matrix_layout,
+- range(len(info.matrix_layout))):
+- layer_number += 1
+- svg = template.svg
+- for (name, (code, press, release)) \
+- in zip(info.matrix_positions, layout):
+- replace = ''
+- if press == 'kbfun_transparent':
+- replace = ''
+- elif press == 'kbfun_shift_press_release':
+- replace = 'sh ' + keycode_to_string.get(code, '[n/a]')
+- elif press == 'kbfun_jump_to_bootloader':
+- replace = '[btldr]'
+- elif press == 'NULL' and release == 'NULL':
+- replace = '(null)'
+- elif re.search(r'numpad', press+release):
+- replace = '[num]'
+- elif re.search(r'layer', press+release):
+- replace = 'la ' + re.findall(r'\d+', press+release)[0] + ' '
+- if re.search(r'push', press+release):
+- replace += '+'
+- if re.search(r'pop', press+release):
+- replace += '-'
+- replace += ' ' + str(code)
+- else:
+- replace = keycode_to_string.get(code, '[n/a]')
+-
+- svg = re.sub(
+- '>'+name+'<', '>'+replace+'<', svg )
+- svg = re.sub(
+- r"\('(" + name + r".*)'\)",
+- r"('\1', " + str(layer) + r")",
+- svg )
+-
+- doc.main += '<h2>Layer ' + str(layer_number) + '</h2>\n' + svg
+-
+- # change the font size
+- doc.main = re.sub(r'22.5px', '15px', doc.main)
+-
+- print(doc.prefix + doc.main + doc.suffix)
++"""
++ )[1:-1]
++
++ # substitute into template
++ # -------
++ # note: this is not general enough to handle any possible layout well, at
++ # the moment. but it should handle more standard ones well. (hopefully
++ # minor) modifications may be necessary on a case by case basis
++ # -------
++ layer_number = -1
++ for (layout, layer) in zip(
++ info.matrix_layout, range(len(info.matrix_layout))
++ ):
++ layer_number += 1
++ svg = template.svg
++ for (name, (code, press, release)) in zip(
++ info.matrix_positions, layout
++ ):
++ replace = ""
++ if press == "kbfun_transparent":
++ replace = ""
++ elif press == "kbfun_shift_press_release":
++ replace = "sh " + keycode_to_string.get(code, "[n/a]")
++ elif press == "kbfun_jump_to_bootloader":
++ replace = "[btldr]"
++ elif press == "NULL" and release == "NULL":
++ replace = "(null)"
++ elif re.search(r"numpad", press + release):
++ replace = "[num]"
++ elif re.search(r"layer", press + release):
++ replace = "la " + re.findall(r"\d+", press + release)[0] + " "
++ if re.search(r"push", press + release):
++ replace += "+"
++ if re.search(r"pop", press + release):
++ replace += "-"
++ replace += " " + str(code)
++ else:
++ replace = keycode_to_string.get(code, "[n/a]")
++
++ svg = re.sub(">" + name + "<", ">" + replace + "<", svg)
++ svg = re.sub(
++ r"\('(" + name + r".*)'\)", r"('\1', " + str(layer) + r")", svg
++ )
++
++ doc.main += "<h2>Layer " + str(layer_number) + "</h2>\n" + svg
++
++ # change the font size
++ doc.main = re.sub(r"22.5px", "15px", doc.main)
++
++ print(doc.prefix + doc.main + doc.suffix)
++
+
+ # -----------------------------------------------------------------------------
+ # -----------------------------------------------------------------------------
+
+ keycode_to_string = {
+- 0x01: "Error", # ErrorRollOver
+- 0x02: "POSTFail",
+- 0x03: "Error", # ErrorUndefined
+- 0x04: "a A",
+- 0x05: "b B",
+- 0x06: "c C",
+- 0x07: "d D",
+- 0x08: "e E",
+- 0x09: "f F",
+- 0x0A: "g G",
+- 0x0B: "h H",
+- 0x0C: "i I",
+- 0x0D: "j J",
+- 0x0E: "k K",
+- 0x0F: "l L",
+- 0x10: "m M",
+- 0x11: "n N",
+- 0x12: "o O",
+- 0x13: "p P",
+- 0x14: "q Q",
+- 0x15: "r R",
+- 0x16: "s S",
+- 0x17: "t T",
+- 0x18: "u U",
+- 0x19: "v V",
+- 0x1A: "w W",
+- 0x1B: "x X",
+- 0x1C: "y Y",
+- 0x1D: "z Z",
+- 0x1E: "1 !",
+- 0x1F: "2 @",
+- 0x20: "3 #",
+- 0x21: "4 $",
+- 0x22: "5 %",
+- 0x23: "6 ^",
+- 0x24: "7 &",
+- 0x25: "8 *",
+- 0x26: "9 (",
+- 0x27: "0 )",
+- 0x28: "Return",
+- 0x29: "Esc",
+- 0x2A: "Backspace",
+- 0x2B: "Tab",
+- 0x2C: "Space",
+- 0x2D: "- _",
+- 0x2E: "= +",
+- 0x2F: "[ {",
+- 0x30: "] }",
+- 0x31: "\ |",
+- 0x32: "# ~",
+- 0x33: "; :",
+- 0x34: "\' \"",
+- 0x35: "` ~",
+- 0x36: ", <",
+- 0x37: ". >",
+- 0x38: "/ ?",
+- 0x39: "Caps",
+- 0x3A: "F1",
+- 0x3B: "F2",
+- 0x3C: "F3",
+- 0x3D: "F4",
+- 0x3E: "F5",
+- 0x3F: "F6",
+- 0x40: "F7",
+- 0x41: "F8",
+- 0x42: "F9",
+- 0x43: "F10",
+- 0x44: "F11",
+- 0x45: "F12",
+- 0x46: "PrintScreen",
+- 0x47: "ScrollLock",
+- 0x48: "Pause",
+- 0x49: "Ins", # Insert
+- 0x4A: "Hm", # Home
+- 0x4B: "Pg\u2191", # up arrow
+- 0x4C: "Delete",
+- 0x4D: "End",
+- 0x4E: "Pg\u2193", # down arrow
+- 0x4F: "\u2192", # right arrow
+- 0x50: "\u2190", # left arrow
+- 0x51: "\u2193", # down arrow
+- 0x52: "\u2191", # up arrow
+-
+- 0x53: "Num",
+- 0x54: "/",
+- 0x55: "*",
+- 0x56: "-",
+- 0x57: "+",
+- 0x58: "Enter",
+- 0x59: "1 End",
+- 0x5A: "2 \u2193", # down arrow
+- 0x5B: "3 Pg\u2193", # down arrow
+- 0x5C: "4 \u2190", # left arrow
+- 0x5D: "5",
+- 0x5E: "6 \u2192", # right arrow
+- 0x5F: "7 Hm", # Home
+- 0x60: "8 \u2191", # up arrow
+- 0x61: "9 Pg\u2191", # up arrow
+- 0x62: "0 Ins", # Insert
+- 0x63: ". Del",
+-
+- 0x64: "\ |",
+- 0x65: "App",
+- 0x66: "Power",
+-
+- 0x67: "=",
+-
+- 0x68: "F13",
+- 0x69: "F14",
+- 0x6A: "F15",
+- 0x6B: "F16",
+- 0x6C: "F17",
+- 0x6D: "F18",
+- 0x6E: "F19",
+- 0x6F: "F20",
+- 0x70: "F21",
+- 0x71: "F22",
+- 0x72: "F23",
+- 0x73: "F24",
+- 0x74: "Exec",
+- 0x75: "Help",
+- 0x76: "Menu",
+- 0x77: "Select",
+- 0x78: "Stop",
+- 0x79: "Again",
+- 0x7A: "Undo",
+- 0x7B: "Cut",
+- 0x7C: "Copy",
+- 0x7D: "Paste",
+- 0x7E: "Find",
+- 0x7F: "Mute",
+- 0x80: "VolUp",
+- 0x81: "VolDown",
+- 0x82: "LockingCapsLock",
+- 0x83: "LockingNumLock",
+- 0x84: "LockingScrollLock",
+-
+- 0x85: ",",
+- 0x86: "=",
+-
+- 0x87: "Int1",
+- 0x88: "Int2",
+- 0x89: "Int3",
+- 0x8A: "Int4",
+- 0x8B: "Int5",
+- 0x8C: "Int6",
+- 0x8D: "Int7",
+- 0x8E: "Int8",
+- 0x8F: "Int9",
+- 0x90: "LANG1",
+- 0x91: "LANG2",
+- 0x92: "LANG3",
+- 0x93: "LANG4",
+- 0x94: "LANG5",
+- 0x95: "LANG6",
+- 0x96: "LANG7",
+- 0x97: "LANG8",
+- 0x98: "LANG9",
+- 0x99: "AlternateErase",
+- 0x9A: "SysReq_Attention",
+- 0x9B: "Cancel",
+- 0x9C: "Clear",
+- 0x9D: "Prior",
+- 0x9E: "Return",
+- 0x9F: "Separator",
+- 0xA0: "Out",
+- 0xA1: "Oper",
+- 0xA2: "Clear_Again",
+- 0xA3: "CrSel_Props",
+- 0xA4: "ExSel",
+-
+- 0xB0: "00",
+- 0xB1: "000",
+-
+- 0xB2: "Thousands_Sep",
+- 0xB3: "Decimal_Sep",
+- 0xB4: "$",
+- 0xB5: "Currency_Subunit",
+-
+- 0xB6: "(",
+- 0xB7: ")",
+- 0xB8: "{",
+- 0xB9: "}",
+-
+- 0xBA: "Tab",
+- 0xBB: "Backspace",
+- 0xBC: "A",
+- 0xBD: "B",
+- 0xBE: "C",
+- 0xBF: "D",
+- 0xC0: "E",
+- 0xC1: "F",
+- 0xC2: "XOR",
+- 0xC3: "^",
+- 0xC4: "%",
+- 0xC5: "<",
+- 0xC6: ">",
+- 0xC7: "&",
+- 0xC8: "&&",
+- 0xC9: "|",
+- 0xCA: "||",
+- 0xCB: ":",
+- 0xCC: "#",
+- 0xCD: "Space",
+- 0xCE: "@",
+- 0xCF: "!",
+- 0xD0: "Mem_Store",
+- 0xD1: "Mem_Recall",
+- 0xD2: "Mem_Clear",
+- 0xD3: "Mem_+",
+- 0xD4: "Mem_-",
+- 0xD5: "Mem_*",
+- 0xD6: "Mem_/",
+- 0xD7: "+-",
+- 0xD8: "Clear",
+- 0xD9: "ClearEntry",
+- 0xDA: "Binary",
+- 0xDB: "Octal",
+- 0xDC: ".",
+- 0xDD: "Hexadecimal",
+-
+- 0xE0: "L-Ctrl",
+- 0xE1: "L-Shift",
+- 0xE2: "L-Alt",
+- 0xE3: "L-GUI",
+- 0xE4: "R-Ctrl",
+- 0xE5: "R-Shift",
+- 0xE6: "R-Alt",
+- 0xE7: "R-GUI",
+- }
++ 0x01: "Error", # ErrorRollOver
++ 0x02: "POSTFail",
++ 0x03: "Error", # ErrorUndefined
++ 0x04: "a A",
++ 0x05: "b B",
++ 0x06: "c C",
++ 0x07: "d D",
++ 0x08: "e E",
++ 0x09: "f F",
++ 0x0A: "g G",
++ 0x0B: "h H",
++ 0x0C: "i I",
++ 0x0D: "j J",
++ 0x0E: "k K",
++ 0x0F: "l L",
++ 0x10: "m M",
++ 0x11: "n N",
++ 0x12: "o O",
++ 0x13: "p P",
++ 0x14: "q Q",
++ 0x15: "r R",
++ 0x16: "s S",
++ 0x17: "t T",
++ 0x18: "u U",
++ 0x19: "v V",
++ 0x1A: "w W",
++ 0x1B: "x X",
++ 0x1C: "y Y",
++ 0x1D: "z Z",
++ 0x1E: "1 !",
++ 0x1F: "2 @",
++ 0x20: "3 #",
++ 0x21: "4 $",
++ 0x22: "5 %",
++ 0x23: "6 ^",
++ 0x24: "7 &",
++ 0x25: "8 *",
++ 0x26: "9 (",
++ 0x27: "0 )",
++ 0x28: "Return",
++ 0x29: "Esc",
++ 0x2A: "Backspace",
++ 0x2B: "Tab",
++ 0x2C: "Space",
++ 0x2D: "- _",
++ 0x2E: "= +",
++ 0x2F: "[ {",
++ 0x30: "] }",
++ 0x31: "\ |",
++ 0x32: "# ~",
++ 0x33: "; :",
++ 0x34: "' \"",
++ 0x35: "` ~",
++ 0x36: ", <",
++ 0x37: ". >",
++ 0x38: "/ ?",
++ 0x39: "Caps",
++ 0x3A: "F1",
++ 0x3B: "F2",
++ 0x3C: "F3",
++ 0x3D: "F4",
++ 0x3E: "F5",
++ 0x3F: "F6",
++ 0x40: "F7",
++ 0x41: "F8",
++ 0x42: "F9",
++ 0x43: "F10",
++ 0x44: "F11",
++ 0x45: "F12",
++ 0x46: "PrintScreen",
++ 0x47: "ScrollLock",
++ 0x48: "Pause",
++ 0x49: "Ins", # Insert
++ 0x4A: "Hm", # Home
++ 0x4B: "Pg\u2191", # up arrow
++ 0x4C: "Delete",
++ 0x4D: "End",
++ 0x4E: "Pg\u2193", # down arrow
++ 0x4F: "\u2192", # right arrow
++ 0x50: "\u2190", # left arrow
++ 0x51: "\u2193", # down arrow
++ 0x52: "\u2191", # up arrow
++ 0x53: "Num",
++ 0x54: "/",
++ 0x55: "*",
++ 0x56: "-",
++ 0x57: "+",
++ 0x58: "Enter",
++ 0x59: "1 End",
++ 0x5A: "2 \u2193", # down arrow
++ 0x5B: "3 Pg\u2193", # down arrow
++ 0x5C: "4 \u2190", # left arrow
++ 0x5D: "5",
++ 0x5E: "6 \u2192", # right arrow
++ 0x5F: "7 Hm", # Home
++ 0x60: "8 \u2191", # up arrow
++ 0x61: "9 Pg\u2191", # up arrow
++ 0x62: "0 Ins", # Insert
++ 0x63: ". Del",
++ 0x64: "\ |",
++ 0x65: "App",
++ 0x66: "Power",
++ 0x67: "=",
++ 0x68: "F13",
++ 0x69: "F14",
++ 0x6A: "F15",
++ 0x6B: "F16",
++ 0x6C: "F17",
++ 0x6D: "F18",
++ 0x6E: "F19",
++ 0x6F: "F20",
++ 0x70: "F21",
++ 0x71: "F22",
++ 0x72: "F23",
++ 0x73: "F24",
++ 0x74: "Exec",
++ 0x75: "Help",
++ 0x76: "Menu",
++ 0x77: "Select",
++ 0x78: "Stop",
++ 0x79: "Again",
++ 0x7A: "Undo",
++ 0x7B: "Cut",
++ 0x7C: "Copy",
++ 0x7D: "Paste",
++ 0x7E: "Find",
++ 0x7F: "Mute",
++ 0x80: "VolUp",
++ 0x81: "VolDown",
++ 0x82: "LockingCapsLock",
++ 0x83: "LockingNumLock",
++ 0x84: "LockingScrollLock",
++ 0x85: ",",
++ 0x86: "=",
++ 0x87: "Int1",
++ 0x88: "Int2",
++ 0x89: "Int3",
++ 0x8A: "Int4",
++ 0x8B: "Int5",
++ 0x8C: "Int6",
++ 0x8D: "Int7",
++ 0x8E: "Int8",
++ 0x8F: "Int9",
++ 0x90: "LANG1",
++ 0x91: "LANG2",
++ 0x92: "LANG3",
++ 0x93: "LANG4",
++ 0x94: "LANG5",
++ 0x95: "LANG6",
++ 0x96: "LANG7",
++ 0x97: "LANG8",
++ 0x98: "LANG9",
++ 0x99: "AlternateErase",
++ 0x9A: "SysReq_Attention",
++ 0x9B: "Cancel",
++ 0x9C: "Clear",
++ 0x9D: "Prior",
++ 0x9E: "Return",
++ 0x9F: "Separator",
++ 0xA0: "Out",
++ 0xA1: "Oper",
++ 0xA2: "Clear_Again",
++ 0xA3: "CrSel_Props",
++ 0xA4: "ExSel",
++ 0xB0: "00",
++ 0xB1: "000",
++ 0xB2: "Thousands_Sep",
++ 0xB3: "Decimal_Sep",
++ 0xB4: "$",
++ 0xB5: "Currency_Subunit",
++ 0xB6: "(",
++ 0xB7: ")",
++ 0xB8: "{",
++ 0xB9: "}",
++ 0xBA: "Tab",
++ 0xBB: "Backspace",
++ 0xBC: "A",
++ 0xBD: "B",
++ 0xBE: "C",
++ 0xBF: "D",
++ 0xC0: "E",
++ 0xC1: "F",
++ 0xC2: "XOR",
++ 0xC3: "^",
++ 0xC4: "%",
++ 0xC5: "<",
++ 0xC6: ">",
++ 0xC7: "&",
++ 0xC8: "&&",
++ 0xC9: "|",
++ 0xCA: "||",
++ 0xCB: ":",
++ 0xCC: "#",
++ 0xCD: "Space",
++ 0xCE: "@",
++ 0xCF: "!",
++ 0xD0: "Mem_Store",
++ 0xD1: "Mem_Recall",
++ 0xD2: "Mem_Clear",
++ 0xD3: "Mem_+",
++ 0xD4: "Mem_-",
++ 0xD5: "Mem_*",
++ 0xD6: "Mem_/",
++ 0xD7: "+-",
++ 0xD8: "Clear",
++ 0xD9: "ClearEntry",
++ 0xDA: "Binary",
++ 0xDB: "Octal",
++ 0xDC: ".",
++ 0xDD: "Hexadecimal",
++ 0xE0: "L-Ctrl",
++ 0xE1: "L-Shift",
++ 0xE2: "L-Alt",
++ 0xE3: "L-GUI",
++ 0xE4: "R-Ctrl",
++ 0xE5: "R-Shift",
++ 0xE6: "R-Alt",
++ 0xE7: "R-GUI",
++}
+
+ # -----------------------------------------------------------------------------
+ # -----------------------------------------------------------------------------
+
+-if __name__ == '__main__':
+- main()
+-
++if __name__ == "__main__":
++ main()
+diff --git a/build-scripts/gen-ui-info.py b/build-scripts/gen-ui-info.py
+index 1c93d32..0fa52e3 100755
+--- a/build-scripts/gen-ui-info.py
++++ b/build-scripts/gen-ui-info.py
+@@ -13,7 +13,16 @@ Depends on:
+ - the project '.map' file (generated by the compiler)
+ """
+
+-_FORMAT_DESCRIPTION = ("""
++import argparse
++import json
++import os
++import pathlib
++import re
++import subprocess
++import sys
++
++_FORMAT_DESCRIPTION = (
++ """
+ /* ----------------------------------------------------------------------------
+ * Version 0
+ * ----------------------------------------------------------------------------
+@@ -31,7 +40,7 @@ var ui_info = {
+ ".meta-data": { // for the JSON file
+ "version": "<number>",
+ "date-generated": "<string>", // format: RFC 3339
+- "description": "<string>",
++ "description": "<string>",
+ },
+ "keyboard-functions": {
+ "<(function name)>": {
+@@ -57,7 +66,7 @@ var ui_info = {
+ "..."
+ },
+ "mappings": {
+- /*
++ /*
+ * The mappings prefixed with 'matrix' have their elements in the same
+ * order as the .hex file (whatever order that is). The mappings
+ * prefixed with 'physical' will have their elements in an order
+@@ -113,365 +122,304 @@ var ui_info = {
+ "number-of-layers": "<number>"
+ }
+ }
+-""")[1:-1]
++"""
++)[1:-1]
+
+ # -----------------------------------------------------------------------------
+
+-import argparse
+-import json
+-import os
+-import re
+-import subprocess
+-import sys
+-
+-# -----------------------------------------------------------------------------
+
+ def gen_static(current_date=None, git_commit_date=None, git_commit_id=None):
+- """Generate static information"""
+-
+- return {
+- '.meta-data': {
+- 'version': 0, # the format version number
+- 'date-generated': current_date,
+- 'description': _FORMAT_DESCRIPTION,
+- },
+- 'miscellaneous': {
+- 'git-commit-date': git_commit_date, # should be passed by makefile
+- 'git-commit-id': git_commit_id, # should be passed by makefile
+- },
+- }
+-
+-def gen_derived(data):
+- return {} # don't really need this info anymore
+-# """
+-# Generate derived information
+-# Should be called last
+-# """
+-# return {
+-# 'miscellaneous': {
+-# 'number-of-layers':
+-# int( data['layout-matrices']['_kb_layout']['length']/(6*14) ),
+-# # because 6*14 is the number of bytes/layer for '_kb_layout'
+-# # (which is a uint8_t matrix)
+-# },
+-# }
+-
+-# -----------------------------------------------------------------------------
++ """Generate static information"""
+
+-def parse_mapfile(map_file_path):
+- return {} # don't really need this info anymore
+-# """Parse the '.map' file"""
+-#
+-# def parse_keyboard_function(f, line):
+-# """Parse keyboard-functions in the '.map' file"""
+-#
+-# search = re.search(r'(0x\S+)\s+(0x\S+)', next(f))
+-# position = int( search.group(1), 16 )
+-# length = int( search.group(2), 16 )
+-#
+-# search = re.search(r'0x\S+\s+(\S+)', next(f))
+-# name = search.group(1)
+-#
+-# return {
+-# 'keyboard-functions': {
+-# name: {
+-# 'position': position,
+-# 'length': length,
+-# },
+-# },
+-# }
+-#
+-# def parse_layout_matrices(f, line):
+-# """Parse layout matrix information in the '.map' file"""
+-#
+-# name = re.search(r'.progmem.data.(_kb_layout\S*)', line).group(1)
+-#
+-# search = re.search(r'(0x\S+)\s+(0x\S+)', next(f))
+-# position = int( search.group(1), 16 )
+-# length = int( search.group(2), 16 )
+-#
+-# return {
+-# 'layout-matrices': {
+-# name: {
+-# 'position': position,
+-# 'length': length,
+-# },
+-# },
+-# }
+-#
+-# # --- parse_mapfile() ---
+-#
+-# # normalize paths
+-# map_file_path = os.path.abspath(map_file_path)
+-# # check paths
+-# if not os.path.exists(map_file_path):
+-# raise ValueError("invalid 'map_file_path' given")
+-#
+-# output = {}
+-#
+-# f = open(map_file_path)
+-#
+-# for line in f:
+-# if re.search(r'^\s*\.text\.kbfun_', line):
+-# dict_merge(output, parse_keyboard_function(f, line))
+-# elif re.search(r'^\s*\.progmem\.data.*layout', line):
+-# dict_merge(output, parse_layout_matrices(f, line))
+-#
+-# return output
++ return {
++ ".meta-data": {
++ "version": 0, # the format version number
++ "date-generated": current_date,
++ "description": _FORMAT_DESCRIPTION,
++ },
++ "miscellaneous": {
++ "git-commit-date": git_commit_date, # should be passed by makefile
++ "git-commit-id": git_commit_id, # should be passed by makefile
++ },
++ }
+
+
+ def find_keyboard_functions(source_code_path):
+- """Parse all files in the source directory"""
+-
+- def read_comments(f, line):
+- """
+- Read in properly formatted multi-line comments
+- - Comments must start with '/*' and end with '*/', each on their own
+- line
+- """
+- comments = ''
+- while(line.strip() != r'*/'):
+- comments += line[2:].strip()+'\n'
+- line = next(f)
+- return comments
+-
+- def parse_comments(comments):
+- """
+- Parse an INI style comment string
+- - Fields begin with '[field-name]', and continue until the next field,
+- or the end of the comment
+- - Fields '[name]', '[description]', and '[note]' are treated specially
+- """
+-
+- def add_field(output, field, value):
+- """Put a field+value pair in 'output', the way we want it, if the
+- pair is valid"""
+-
+- value = value.strip()
+-
+- if field is not None:
+- if field in ('name', 'description'):
+- if field not in output:
+- output[field] = value
+- else:
+- if field == 'note':
+- field = 'notes'
+-
+- if field not in output:
+- output[field] = []
+-
+- output[field] += [value]
+-
+- # --- parse_comments() ---
+-
+- output = {}
+-
+- field = None
+- value = None
+- for line in comments.split('\n'):
+- line = line.strip()
+-
+- if re.search(r'^\[.*\]$', line):
+- add_field(output, field, value)
+- field = line[1:-1]
+- value = None
+-
+- else:
+- if value is None:
+- value = ''
+- if len(value) > 0 and value[-1] == '.':
+- line = ' '+line
+- value += ' '+line
+-
+- add_field(output, field, value)
+-
+- return output
+-
+- def parse_keyboard_function(f, line, comments):
+- """Parse keyboard-functions in the source code"""
+-
+- search = re.search(r'void\s+(kbfun_\S+)\s*\(void\)', line)
+- name = search.group(1)
+-
+- return {
+- 'keyboard-functions': {
+- name: {
+- 'comments': parse_comments(comments),
+- },
+- },
+- }
+-
+- # --- find_keyboard_functions() ---
+-
+- # normalize paths
+- source_code_path = os.path.abspath(source_code_path)
+- # check paths
+- if not os.path.exists(source_code_path):
+- raise ValueError("invalid 'source_code_path' given")
+-
+- output = {}
+-
+- for tup in os.walk(source_code_path):
+- for file_name in tup[2]:
+- # normalize paths
+- file_name = os.path.abspath( os.path.join( tup[0], file_name ) )
+-
+- # ignore non '.c' files
+- if file_name[-2:] != '.c':
+- continue
+-
+- f = open(file_name)
+-
+- comments = ''
+- for line in f:
+- if line.strip() == r'/*':
+- comments = read_comments(f, line)
+- elif re.search(r'void\s+kbfun_\S+\s*\(void\)', line):
+- dict_merge(
+- output,
+- parse_keyboard_function(f, line, comments) )
+-
+- return output
++ """Parse all files in the source directory"""
++
++ def read_comments(f, line):
++ """
++ Read in properly formatted multi-line comments
++ - Comments must start with '/*' and end with '*/', each on their own
++ line
++ """
++ comments = ""
++ while line.strip() != r"*/":
++ comments += line[2:].strip() + "\n"
++ line = next(f)
++ return comments
++
++ def parse_comments(comments):
++ """
++ Parse an INI style comment string
++ - Fields begin with '[field-name]', and continue until the next field,
++ or the end of the comment
++ - Fields '[name]', '[description]', and '[note]' are treated specially
++ """
++
++ def add_field(output, field, value):
++ """Put a field+value pair in 'output', the way we want it, if the
++ pair is valid"""
++
++ value = value.strip()
++
++ if field is not None:
++ if field in ("name", "description"):
++ if field not in output:
++ output[field] = value
++ else:
++ if field == "note":
++ field = "notes"
++
++ if field not in output:
++ output[field] = []
++
++ output[field] += [value]
++
++ # --- parse_comments() ---
++
++ output = {}
++
++ field = None
++ value = None
++ for line in comments.split("\n"):
++ line = line.strip()
++
++ if re.search(r"^\[.*\]$", line):
++ add_field(output, field, value)
++ field = line[1:-1]
++ value = None
++ else:
++ if value is None:
++ value = ""
++ if len(value) > 0 and value[-1] == ".":
++ line = " " + line
++ value += " " + line
++
++ add_field(output, field, value)
++
++ return output
++
++ def parse_keyboard_function(f, line, comments):
++ """Parse keyboard-functions in the source code"""
++
++ search = re.search(r"void\s+(kbfun_\S+)\s*\(void\)", line)
++ name = search.group(1)
++
++ return {
++ "keyboard-functions": {
++ name: {
++ "comments": parse_comments(comments),
++ },
++ },
++ }
++
++ # --- find_keyboard_functions() ---
++
++ # normalize paths
++ source_code_path = os.path.abspath(source_code_path)
++ # check paths
++ if not os.path.exists(source_code_path):
++ raise ValueError("invalid 'source_code_path' given")
++
++ output = {}
++
++ for tup in os.walk(source_code_path):
++ for file_name in tup[2]:
++ # normalize paths
++ file_name = os.path.abspath(os.path.join(tup[0], file_name))
++
++ # ignore non '.c' files
++ if file_name[-2:] != ".c":
++ continue
++
++ f = open(file_name)
++
++ comments = ""
++ for line in f:
++ if line.strip() == r"/*":
++ comments = read_comments(f, line)
++ elif re.search(r"void\s+kbfun_\S+\s*\(void\)", line):
++ dict_merge(
++ output, parse_keyboard_function(f, line, comments)
++ )
++
++ return output
+
+
+ def gen_mappings(matrix_file_path, layout_file_path):
+- # normalize paths
+- matrix_file_path = os.path.abspath(matrix_file_path)
+- layout_file_path = os.path.abspath(layout_file_path)
+-
+- def parse_matrix_file(matrix_file_path):
+- match = re.search( # find the whole 'KB_MATRIX_LAYER' macro
+- r'#define\s+KB_MATRIX_LAYER\s*\(([^)]+)\)[^{]*\{\{([^#]+)\}\}',
+- open(matrix_file_path).read() )
+-
+- return {
+- "mappings": {
+- "physical-positions": re.findall(r'k..', match.group(1)),
+- "matrix-positions": re.findall(r'k..|na', match.group(2)),
+- },
+- }
+-
+- def parse_layout_file(layout_file_path):
+- match = re.findall( # find each whole '_kb_layout*' matrix definition
+- r'(_kb_layout\w*)[^=]*=((?:[^{}]*\{){3}[^=]*(?:[^{}]*\}){3})',
+- subprocess.getoutput("gcc -E '"+layout_file_path+"'") )
+-
+- layout = {}
+- # collect all the values
+- for (name, matrix) in match:
+- layout[name] = [
+- re.findall( # find all numbers and function pointers
+- r'[x0-9A-F]+|&\w+|NULL',
+- re.sub( # replace '((void *) 0)' with 'NULL'
+- r'\(\s*\(\s*void\s*\*\s*\)\s*0\s*\)',
+- 'NULL',
+- el ) )
+- for el in
+- re.findall( # find each whole layer
+- r'(?:[^{}]*\{){2}((?:[^}]|\}\s*,)+)(?:[^{}]*\}){2}',
+- matrix ) ]
+-
+- # make the numbers into actual numbers
+- layout['_kb_layout'] = \
+- [[eval(el) for el in layer] for layer in layout['_kb_layout']]
+- # remove the preceeding '&' from function pointers
+- for matrix in ('_kb_layout_press', '_kb_layout_release'):
+- layout[matrix] = \
+- [ [re.sub(r'&', '', el) for el in layer]
+- for layer in layout[matrix] ]
+-
+- return {
+- "mappings": {
+- "matrix-layout":
+- # group them all properly
+- [ [[c, p, r] for (c, p, r) in zip(code, press, release)]
+- for (code, press, release) in
+- zip( layout['_kb_layout'],
+- layout['_kb_layout_press'],
+- layout['_kb_layout_release'] ) ]
+- },
+- }
+-
+- return dict_merge(
+- parse_matrix_file(matrix_file_path),
+- parse_layout_file(layout_file_path) )
++ # normalize paths
++ matrix_file_path = os.path.abspath(matrix_file_path)
++ layout_file_path = os.path.abspath(layout_file_path)
++ layout_name = pathlib.Path(layout_file_path).with_suffix('').name
++
++ def parse_matrix_file(matrix_file_path):
++ match = re.search( # find the whole 'KB_MATRIX_LAYER' macro
++ r"#define\s+KB_MATRIX_LAYER\s*\(([^)]+)\)[^{]*\{\{([^#]+)\}\}",
++ open(matrix_file_path).read(),
++ )
++
++ return {
++ "mappings": {
++ "physical-positions": re.findall(r"k..", match.group(1)),
++ "matrix-positions": re.findall(r"k..|na", match.group(2)),
++ },
++ }
++
++ def parse_layout_file(layout_file_path):
++ output = subprocess.check_output(
++ ['avr-gcc', f'-DMAKEFILE_KEYBOARD_LAYOUT={layout_name}',
++ '-E', layout_file_path], encoding='UTF-8')
++ match = re.findall( # find each whole '_kb_layout*' matrix definition
++ r"(_kb_layout\w*)[^=]*=((?:[^{}]*\{){3}[^=]*(?:[^{}]*\}){3})",
++ output,
++ )
++
++ layout = {}
++ # collect all the values
++ for (name, matrix) in match:
++ layout[name] = [
++ re.findall( # find all numbers and function pointers
++ r"[x0-9A-F]+|&\w+|NULL",
++ re.sub( # replace '((void *) 0)' with 'NULL'
++ r"\(\s*\(\s*void\s*\*\s*\)\s*0\s*\)", "NULL", el
++ ),
++ )
++ for el in re.findall( # find each whole layer
++ r"(?:[^{}]*\{){2}((?:[^}]|\}\s*,)+)(?:[^{}]*\}){2}", matrix
++ )
++ ]
++
++ # make the numbers into actual numbers
++ layout["_kb_layout"] = [
++ [eval(el) for el in layer] for layer in layout["_kb_layout"]
++ ]
++ # remove the preceeding '&' from function pointers
++ for matrix in ("_kb_layout_press", "_kb_layout_release"):
++ layout[matrix] = [
++ [re.sub(r"&", "", el) for el in layer]
++ for layer in layout[matrix]
++ ]
++
++ return {
++ "mappings": {
++ "matrix-layout":
++ # group them all properly
++ [
++ [[c, p, r] for (c, p, r) in zip(code, press, release)]
++ for (code, press, release) in zip(
++ layout["_kb_layout"],
++ layout["_kb_layout_press"],
++ layout["_kb_layout_release"],
++ )
++ ]
++ },
++ }
++
++ return dict_merge(
++ parse_matrix_file(matrix_file_path),
++ parse_layout_file(layout_file_path),
++ )
+
+
+ # -----------------------------------------------------------------------------
+
++
+ def dict_merge(a, b):
+- """
+- Recursively merge two dictionaries
+- - I was looking around for an easy way to do this, and found something
+- [here]
+- (http://www.xormedia.com/recursively-merge-dictionaries-in-python.html).
+- This is pretty close, but i didn't copy it exactly.
+- """
++ """
++ Recursively merge two dictionaries
++ - I was looking around for an easy way to do this, and found something
++ [here]
++ (http://www.xormedia.com/recursively-merge-dictionaries-in-python.html).
++ This is pretty close, but i didn't copy it exactly.
++ """
++
++ if not isinstance(a, dict) or not isinstance(b, dict):
++ return b
+
+- if not isinstance(a, dict) or not isinstance(b, dict):
+- return b
++ for (key, value) in b.items():
++ if key in a:
++ a[key] = dict_merge(a[key], value)
++ else:
++ a[key] = value
+
+- for (key, value) in b.items():
+- if key in a:
+- a[key] = dict_merge(a[key], value)
+- else:
+- a[key] = value
++ return a
+
+- return a
+
+ # -----------------------------------------------------------------------------
+
++
+ def main():
+- arg_parser = argparse.ArgumentParser(
+- description = 'Generate project data for use with the UI' )
+-
+- arg_parser.add_argument(
+- '--current-date',
+- help = ( "should be in the format rfc-3339 "
+- + "(e.g. 2006-08-07 12:34:56-06:00)" ),
+- required = True )
+- arg_parser.add_argument(
+- '--git-commit-date',
+- help = ( "should be in the format rfc-3339 "
+- + "(e.g. 2006-08-07 12:34:56-06:00)" ),
+- required = True )
+- arg_parser.add_argument(
+- '--git-commit-id',
+- help = "the git commit ID",
+- required = True )
+- arg_parser.add_argument(
+- '--map-file-path',
+- help = "the path to the '.map' file",
+- required = True )
+- arg_parser.add_argument(
+- '--source-code-path',
+- help = "the path to the source code directory",
+- required = True )
+- arg_parser.add_argument(
+- '--matrix-file-path',
+- help = "the path to the matrix file we're using",
+- required = True )
+- arg_parser.add_argument(
+- '--layout-file-path',
+- help = "the path to the layout file we're using",
+- required = True )
+-
+- args = arg_parser.parse_args(sys.argv[1:])
+-
+- output = {}
+- dict_merge( output, gen_static( args.current_date,
+- args.git_commit_date,
+- args.git_commit_id ) )
+- dict_merge(output, parse_mapfile(args.map_file_path))
+- dict_merge(output, find_keyboard_functions(args.source_code_path))
+- dict_merge(output, gen_mappings( args.matrix_file_path,
+- args.layout_file_path ))
+- dict_merge(output, gen_derived(output))
+-
+- print(json.dumps(output, sort_keys=True, indent=4))
++ arg_parser = argparse.ArgumentParser(
++ description="Generate project data for use with the UI"
++ )
++
++ arg_parser.add_argument(
++ "--current-date",
++ help=(
++ "should be in the format rfc-3339 "
++ "(e.g. 2006-08-07 12:34:56-06:00)"
++ ),
++ required=True,
++ )
++ arg_parser.add_argument(
++ "--git-commit-date",
++ help=(
++ "should be in the format rfc-3339 "
++ "(e.g. 2006-08-07 12:34:56-06:00)"
++ ),
++ required=True,
++ )
++ arg_parser.add_argument(
++ "--git-commit-id", help="the git commit ID", required=True
++ )
++ arg_parser.add_argument(
++ "--map-file-path", help="the path to the '.map' file", required=True
++ )
++ arg_parser.add_argument(
++ "--source-code-path",
++ help="the path to the source code directory",
++ required=True,
++ )
++ arg_parser.add_argument(
++ "--matrix-file-path",
++ help="the path to the matrix file we're using",
++ required=True,
++ )
++ arg_parser.add_argument(
++ "--layout-file-path",
++ help="the path to the layout file we're using",
++ required=True,
++ )
++
++ args = arg_parser.parse_args(sys.argv[1:])
++
++ output = {}
++ dict_merge(
++ output,
++ gen_static(
++ args.current_date, args.git_commit_date, args.git_commit_id
++ )
++ )
++ dict_merge(output, find_keyboard_functions(args.source_code_path))
++ dict_merge(
++ output, gen_mappings(args.matrix_file_path, args.layout_file_path)
++ )
++
++ print(json.dumps(output, sort_keys=True, indent=4))
+
+-# -----------------------------------------------------------------------------
+
+-if __name__ == '__main__':
+- main()
++# -----------------------------------------------------------------------------
+
++if __name__ == "__main__":
++ main()
+diff --git a/makefile b/makefile
+index d9fe10c..971ee0e 100644
+--- a/makefile
++++ b/makefile
+@@ -58,24 +58,27 @@ SCRIPTS := build-scripts
+ all: dist
+
+ clean:
+- git clean -dX # remove ignored files and directories
+- -rm -r '$(BUILD)'
++ git clean -fdX # remove ignored files and directories
++ rm -rf '$(BUILD)'
+
+ checkin:
+ -git commit -a
+
+ build-dir:
+- -rm -r '$(BUILD)/$(TARGET)'*
+- -mkdir -p '$(BUILD)/$(TARGET)'
++ rm -rf '$(BUILD)/$(TARGET)'*
++ mkdir -p '$(BUILD)/$(TARGET)'
+
+ firmware:
+ cd src; $(MAKE) LAYOUT=$(LAYOUT) all
+
+-$(ROOT)/firmware.%: firmware
++$(ROOT):
++ mkdir -p '$@'
++
++$(ROOT)/firmware.%: firmware $(ROOT)
+ cp 'src/firmware.$*' '$@'
+
+
+-$(ROOT)/firmware--ui-info.json: $(SCRIPTS)/gen-ui-info.py checkin
++$(ROOT)/firmware--ui-info.json: $(SCRIPTS)/gen-ui-info.py checkin firmware
+ ( ./'$<' \
+ --current-date '$(shell $(DATE_PROG) --rfc-3339 s)' \
+ --git-commit-date '$(GIT_COMMIT_DATE)' \
diff --git a/gnu/packages/patches/ergodox-firmware-fix-numpad.patch b/gnu/packages/patches/ergodox-firmware-fix-numpad.patch
new file mode 100644
index 0000000000..47af9f8398
--- /dev/null
+++ b/gnu/packages/patches/ergodox-firmware-fix-numpad.patch
@@ -0,0 +1,18 @@
+Submitted upstream: https://github.com/benblazak/ergodox-firmware/pull/100
+
+diff --git a/src/lib/key-functions/public/special.c b/src/lib/key-functions/public/special.c
+index 42aba45..6488137 100644
+--- a/src/lib/key-functions/public/special.c
++++ b/src/lib/key-functions/public/special.c
+@@ -102,9 +102,9 @@ void kbfun_2_keys_capslock_press_release(void) {
+ static uint8_t numpad_layer_id;
+
+ static inline void numpad_toggle_numlock(void) {
+- _kbfun_press_release(true, KEY_LockingNumLock);
++ _kbfun_press_release(true, KEYPAD_NumLock_Clear);
+ usb_keyboard_send();
+- _kbfun_press_release(false, KEY_LockingNumLock);
++ _kbfun_press_release(false, KEYPAD_NumLock_Clear);
+ usb_keyboard_send();
+ }
+
diff --git a/gnu/packages/patches/glibc-2.35-CVE-2023-4911.patch b/gnu/packages/patches/glibc-2.35-CVE-2023-4911.patch
new file mode 100644
index 0000000000..d8044f064d
--- /dev/null
+++ b/gnu/packages/patches/glibc-2.35-CVE-2023-4911.patch
@@ -0,0 +1,160 @@
+From 1056e5b4c3f2d90ed2b4a55f96add28da2f4c8fa Mon Sep 17 00:00:00 2001
+From: Siddhesh Poyarekar <siddhesh@sourceware.org>
+Date: Tue, 19 Sep 2023 18:39:32 -0400
+Subject: [PATCH 1/1] tunables: Terminate if end of input is reached
+ (CVE-2023-4911)
+
+The string parsing routine may end up writing beyond bounds of tunestr
+if the input tunable string is malformed, of the form name=name=val.
+This gets processed twice, first as name=name=val and next as name=val,
+resulting in tunestr being name=name=val:name=val, thus overflowing
+tunestr.
+
+Terminate the parsing loop at the first instance itself so that tunestr
+does not overflow.
+
+This also fixes up tst-env-setuid-tunables to actually handle failures
+correct and add new tests to validate the fix for this CVE.
+
+Signed-off-by: Siddhesh Poyarekar <siddhesh@sourceware.org>
+Reviewed-by: Carlos O'Donell <carlos@redhat.com>
+---
+Backported to 2.35 by Liliana Marie Prikler <liliana.prikler@gmail.com>
+
+ NEWS | 5 +++++
+ elf/dl-tunables.c | 17 +++++++++-------
+ elf/tst-env-setuid-tunables.c | 37 +++++++++++++++++++++++++++--------
+ 3 files changed, 44 insertions(+), 15 deletions(-)
+
+Index: glibc-2.35/NEWS
+===================================================================
+--- glibc-2.35.orig/NEWS
++++ glibc-2.35/NEWS
+@@ -199,6 +199,11 @@ Security related changes:
+ corresponds to the / directory through an unprivileged mount
+ namespace. Reported by Qualys.
+
++ CVE-2023-4911: If a tunable of the form NAME=NAME=VAL is passed in the
++ environment of a setuid program and NAME is valid, it may result in a
++ buffer overflow, which could be exploited to achieve escalated
++ privileges. This flaw was introduced in glibc 2.34.
++
+ The following bugs are resolved with this release:
+
+ [12889] nptl: Race condition in pthread_kill
+Index: glibc-2.35/elf/dl-tunables.c
+===================================================================
+--- glibc-2.35.orig/elf/dl-tunables.c
++++ glibc-2.35/elf/dl-tunables.c
+@@ -187,11 +187,7 @@ parse_tunables (char *tunestr, char *val
+ /* If we reach the end of the string before getting a valid name-value
+ pair, bail out. */
+ if (p[len] == '\0')
+- {
+- if (__libc_enable_secure)
+- tunestr[off] = '\0';
+- return;
+- }
++ break;
+
+ /* We did not find a valid name-value pair before encountering the
+ colon. */
+@@ -251,9 +247,16 @@ parse_tunables (char *tunestr, char *val
+ }
+ }
+
+- if (p[len] != '\0')
+- p += len + 1;
++ /* We reached the end while processing the tunable string. */
++ if (p[len] == '\0')
++ break;
++
++ p += len + 1;
+ }
++
++ /* Terminate tunestr before we leave. */
++ if (__libc_enable_secure)
++ tunestr[off] = '\0';
+ }
+ #endif
+
+Index: glibc-2.35/elf/tst-env-setuid-tunables.c
+===================================================================
+--- glibc-2.35.orig/elf/tst-env-setuid-tunables.c
++++ glibc-2.35/elf/tst-env-setuid-tunables.c
+@@ -52,6 +52,8 @@ const char *teststrings[] =
+ "glibc.malloc.perturb=0x800:not_valid.malloc.check=2:glibc.malloc.mmap_threshold=4096",
+ "glibc.not_valid.check=2:glibc.malloc.mmap_threshold=4096",
+ "not_valid.malloc.check=2:glibc.malloc.mmap_threshold=4096",
++ "glibc.malloc.mmap_threshold=glibc.malloc.mmap_threshold=4096",
++ "glibc.malloc.check=2",
+ "glibc.malloc.garbage=2:glibc.maoc.mmap_threshold=4096:glibc.malloc.check=2",
+ "glibc.malloc.check=4:glibc.malloc.garbage=2:glibc.maoc.mmap_threshold=4096",
+ ":glibc.malloc.garbage=2:glibc.malloc.check=1",
+@@ -70,6 +72,8 @@ const char *resultstrings[] =
+ "glibc.malloc.perturb=0x800:glibc.malloc.mmap_threshold=4096",
+ "glibc.malloc.mmap_threshold=4096",
+ "glibc.malloc.mmap_threshold=4096",
++ "glibc.malloc.mmap_threshold=glibc.malloc.mmap_threshold=4096",
++ "",
+ "",
+ "",
+ "",
+@@ -89,6 +93,8 @@ test_child (int off)
+
+ if (val != NULL)
+ printf ("[%d] Unexpected GLIBC_TUNABLES VALUE %s\n", off, val);
++ else
++ printf ("[%d] GLIBC_TUNABLES environment variable absent\n", off);
+
+ return 1;
+ #else
+@@ -117,21 +123,26 @@ do_test (int argc, char **argv)
+ if (ret != 0)
+ exit (1);
+
+- exit (EXIT_SUCCESS);
++ /* Special return code to make sure that the child executed all the way
++ through. */
++ exit (42);
+ }
+ else
+ {
+- int ret = 0;
+-
+ /* Spawn tests. */
+ for (int i = 0; i < array_length (teststrings); i++)
+ {
+ char buf[INT_BUFSIZE_BOUND (int)];
+
+- printf ("Spawned test for %s (%d)\n", teststrings[i], i);
++ printf ("[%d] Spawned test for %s\n", i, teststrings[i]);
+ snprintf (buf, sizeof (buf), "%d\n", i);
++ fflush (stdout);
+ if (setenv ("GLIBC_TUNABLES", teststrings[i], 1) != 0)
+- exit (1);
++ {
++ printf (" [%d] Failed to set GLIBC_TUNABLES: %m", i);
++ support_record_failure ();
++ continue;
++ }
+
+ int status = support_capture_subprogram_self_sgid (buf);
+
+@@ -139,9 +150,14 @@ do_test (int argc, char **argv)
+ if (WEXITSTATUS (status) == EXIT_UNSUPPORTED)
+ return EXIT_UNSUPPORTED;
+
+- ret |= status;
++ if (WEXITSTATUS (status) != 42)
++ {
++ printf (" [%d] child failed with status %d\n", i,
++ WEXITSTATUS (status));
++ support_record_failure ();
++ }
+ }
+- return ret;
++ return 0;
+ }
+ }
+
diff --git a/gnu/packages/patches/icecat-compare-paths.patch b/gnu/packages/patches/icecat-compare-paths.patch
new file mode 100644
index 0000000000..69c03e05ee
--- /dev/null
+++ b/gnu/packages/patches/icecat-compare-paths.patch
@@ -0,0 +1,21 @@
+See comment in gnu/build/icecat-extension.scm.
+
+--- a/toolkit/mozapps/extensions/internal/XPIDatabase.jsm
++++ b/toolkit/mozapps/extensions/internal/XPIDatabase.jsm
+@@ -3452,6 +3452,7 @@ const XPIDatabaseReconcile = {
+ if (
+ newAddon ||
+ oldAddon.updateDate != xpiState.mtime ||
++ oldAddon.path != xpiState.path ||
+ (aUpdateCompatibility && this.isAppBundledLocation(installLocation))
+ ) {
+ newAddon = this.updateMetadata(
+@@ -3460,8 +3461,6 @@ const XPIDatabaseReconcile = {
+ xpiState,
+ newAddon
+ );
+- } else if (oldAddon.path != xpiState.path) {
+- newAddon = this.updatePath(installLocation, oldAddon, xpiState);
+ } else if (aUpdateCompatibility || aSchemaChange) {
+ newAddon = this.updateCompatibility(
+ installLocation,
diff --git a/gnu/packages/patches/icecat-use-system-wide-dir.patch b/gnu/packages/patches/icecat-use-system-wide-dir.patch
new file mode 100644
index 0000000000..a635a4d18e
--- /dev/null
+++ b/gnu/packages/patches/icecat-use-system-wide-dir.patch
@@ -0,0 +1,36 @@
+Replace "/usr/lib/mozilla" (the system-wide directory for extensions and
+native manifests) with "$ICECAT_SYSTEM_DIR".
+
+--- a/toolkit/xre/nsXREDirProvider.cpp
++++ b/toolkit/xre/nsXREDirProvider.cpp
+@@ -293,24 +293,12 @@ nsresult nsXREDirProvider::GetBackgroundTasksProfilesRootDir(
+ static nsresult GetSystemParentDirectory(nsIFile** aFile) {
+ nsresult rv;
+ nsCOMPtr<nsIFile> localDir;
+-# if defined(XP_MACOSX)
+- rv = GetOSXFolderType(kOnSystemDisk, kApplicationSupportFolderType,
+- getter_AddRefs(localDir));
+- if (NS_SUCCEEDED(rv)) {
+- rv = localDir->AppendNative("Mozilla"_ns);
+- }
+-# else
+- constexpr auto dirname =
+-# ifdef HAVE_USR_LIB64_DIR
+- "/usr/lib64/mozilla"_ns
+-# elif defined(__OpenBSD__) || defined(__FreeBSD__)
+- "/usr/local/lib/mozilla"_ns
+-# else
+- "/usr/lib/mozilla"_ns
+-# endif
+- ;
+- rv = NS_NewNativeLocalFile(dirname, false, getter_AddRefs(localDir));
+-# endif
++
++ const char* systemParentDir = getenv("ICECAT_SYSTEM_DIR");
++ if (!systemParentDir || !*systemParentDir) return NS_ERROR_FAILURE;
++
++ rv = NS_NewNativeLocalFile(nsDependentCString(systemParentDir), false,
++ getter_AddRefs(localDir));
+
+ if (NS_SUCCEEDED(rv)) {
+ localDir.forget(aFile);
diff --git a/gnu/packages/patches/qmk-firmware-fix-hacker-dvorak.patch b/gnu/packages/patches/qmk-firmware-fix-hacker-dvorak.patch
new file mode 100644
index 0000000000..69e68cc8e1
--- /dev/null
+++ b/gnu/packages/patches/qmk-firmware-fix-hacker-dvorak.patch
@@ -0,0 +1,15 @@
+Submitted upstream: https://github.com/qmk/qmk_firmware/pull/22102
+
+diff --git a/keyboards/ergodox_ez/keymaps/hacker_dvorak/user/layer_set_state_user.c b/keyboards/ergodox_ez/keymaps/hacker_dvorak/user/layer_set_state_user.c
+index bd4fd10ad1..5ce5f5298f 100644
+--- a/keyboards/ergodox_ez/keymaps/hacker_dvorak/user/layer_set_state_user.c
++++ b/keyboards/ergodox_ez/keymaps/hacker_dvorak/user/layer_set_state_user.c
+@@ -4,7 +4,7 @@ layer_state_t layer_state_set_user(layer_state_t state) {
+
+ switch (layer) {
+ case DVORAK:
+- rgblight_sethsv_noeeprom(GREEN);
++ rgblight_sethsv_noeeprom(HSV_GREEN);
+ rgblight_mode_noeeprom(RGBLIGHT_MODE_STATIC_LIGHT);
+
+ if (PLOVER_MODE) {
diff --git a/gnu/packages/patches/qtbase-5-use-TZDIR.patch b/gnu/packages/patches/qtbase-5-use-TZDIR.patch
new file mode 100644
index 0000000000..b6c377b133
--- /dev/null
+++ b/gnu/packages/patches/qtbase-5-use-TZDIR.patch
@@ -0,0 +1,39 @@
+Use $TZDIR to search for time-zone data. Thus avoid depending on package
+"tzdata", which often introduces changes with near-immediate effects, so it's
+important to be able to update it fast.
+
+Based on a patch from NixOS.
+===================================================================
+--- qtbase-opensource-src-5.14.2.orig/src/corelib/time/qtimezoneprivate_tz.cpp
++++ qtbase-opensource-src-5.15.2/src/corelib/time/qtimezoneprivate_tz.cpp
+@@ -70,7 +70,11 @@
+ // Parse zone.tab table, assume lists all installed zones, if not will need to read directories
+ static QTzTimeZoneHash loadTzTimeZones()
+ {
+- QString path = QStringLiteral("/usr/share/zoneinfo/zone.tab");
++ // Try TZDIR first, in case we're running on GuixSD.
++ QString path = QFile::decodeName(qgetenv("TZDIR")) + QStringLiteral("/zone.tab");
++ // Fallback to traditional paths in case we are not on GuixSD.
++ if (!QFile::exists(path))
++ path = QStringLiteral("/usr/share/zoneinfo/zone.tab");
+ if (!QFile::exists(path))
+ path = QStringLiteral("/usr/lib/zoneinfo/zone.tab");
+
+@@ -645,6 +649,9 @@
+ if (!tzif.open(QIODevice::ReadOnly))
+ return;
+ } else {
++ // Try TZDIR first, in case we're running on GuixSD.
++ tzif.setFileName(QFile::decodeName(qgetenv("TZDIR")) + QStringLiteral("/") + QString::fromLocal8Bit(ianaId));
++ if (!tzif.open(QIODevice::ReadOnly)) {
+ // Open named tz, try modern path first, if fails try legacy path
+ tzif.setFileName(QLatin1String("/usr/share/zoneinfo/") + QString::fromLocal8Bit(ianaId));
+ if (!tzif.open(QIODevice::ReadOnly)) {
+@@ -652,6 +659,7 @@
+ if (!tzif.open(QIODevice::ReadOnly))
+ return;
+ }
++ }
+ }
+
+ QDataStream ds(&tzif);
diff --git a/gnu/packages/patches/qtbase-use-TZDIR.patch b/gnu/packages/patches/qtbase-use-TZDIR.patch
index b6c377b133..98bf7493e9 100644
--- a/gnu/packages/patches/qtbase-use-TZDIR.patch
+++ b/gnu/packages/patches/qtbase-use-TZDIR.patch
@@ -1,39 +1,141 @@
-Use $TZDIR to search for time-zone data. Thus avoid depending on package
-"tzdata", which often introduces changes with near-immediate effects, so it's
-important to be able to update it fast.
+From 1075606f8b2f9e153c82f8e50cbd69cea9c72e87 Mon Sep 17 00:00:00 2001
+From: Edward Welbourne <edward.welbourne@qt.io>
+Date: Mon, 11 Sep 2023 11:41:39 +0200
+Subject: [PATCH] Support the TZDIR environment variable
-Based on a patch fron NixOS.
-===================================================================
---- qtbase-opensource-src-5.14.2.orig/src/corelib/time/qtimezoneprivate_tz.cpp
-+++ qtbase-opensource-src-5.15.2/src/corelib/time/qtimezoneprivate_tz.cpp
-@@ -70,7 +70,11 @@
- // Parse zone.tab table, assume lists all installed zones, if not will need to read directories
+On Linux / glibc, this overrides the default system location for the
+zone info. So check for files there first. Break out a function to
+manage the trying of (now three) zoneinfo directories when opening a
+file by name relative to there.
+
+Pick-to: 6.6 6.5
+Task-number: QTBUG-116017
+Change-Id: I1f97107aabd9015c0a5543639870f1d70654ca67
+---
+* Rebased on top of v6.5.2.
+
+ src/corelib/time/qtimezoneprivate_tz.cpp | 73 ++++++++++++++++--------
+ 1 file changed, 49 insertions(+), 24 deletions(-)
+
+diff --git a/src/corelib/time/qtimezoneprivate_tz.cpp b/src/corelib/time/qtimezoneprivate_tz.cpp
+index 067191d816..a8b2fc894e 100644
+--- a/src/corelib/time/qtimezoneprivate_tz.cpp
++++ b/src/corelib/time/qtimezoneprivate_tz.cpp
+@@ -51,17 +51,41 @@ typedef QHash<QByteArray, QTzTimeZone> QTzTimeZoneHash;
+
+ static bool isTzFile(const QString &name);
+
++// Open a named file under the zone info directory:
++static bool openZoneInfo(QString name, QFile *file)
++{
++ // At least on Linux / glibc (see man 3 tzset), $TZDIR overrides the system
++ // default location for zone info:
++ const QString tzdir = qEnvironmentVariable("TZDIR");
++ if (!tzdir.isEmpty()) {
++ file->setFileName(QDir(tzdir).filePath(name));
++ if (file->open(QIODevice::ReadOnly))
++ return true;
++ }
++ // Try modern system path first:
++ constexpr auto zoneShare = "/usr/share/zoneinfo/"_L1;
++ if (tzdir != zoneShare && tzdir != zoneShare.chopped(1)) {
++ file->setFileName(zoneShare + name);
++ if (file->open(QIODevice::ReadOnly))
++ return true;
++ }
++ // Fall back to legacy system path:
++ constexpr auto zoneLib = "/usr/lib/zoneinfo/"_L1;
++ if (tzdir != zoneLib && tzdir != zoneLib.chopped(1)) {
++ file->setFileName(zoneLib + name);
++ if (file->open(QIODevice::ReadOnly))
++ return true;
++ }
++ return false;
++}
++
+ // Parse zone.tab table for territory information, read directories to ensure we
+ // find all installed zones (many are omitted from zone.tab; even more from
+ // zone1970.tab).
static QTzTimeZoneHash loadTzTimeZones()
{
- QString path = QStringLiteral("/usr/share/zoneinfo/zone.tab");
-+ // Try TZDIR first, in case we're running on GuixSD.
-+ QString path = QFile::decodeName(qgetenv("TZDIR")) + QStringLiteral("/zone.tab");
-+ // Fallback to traditional paths in case we are not on GuixSD.
-+ if (!QFile::exists(path))
-+ path = QStringLiteral("/usr/share/zoneinfo/zone.tab");
- if (!QFile::exists(path))
- path = QStringLiteral("/usr/lib/zoneinfo/zone.tab");
+- if (!QFile::exists(path))
+- path = QStringLiteral("/usr/lib/zoneinfo/zone.tab");
+-
+- QFile tzif(path);
+- if (!tzif.open(QIODevice::ReadOnly))
++ QFile tzif;
++ if (!openZoneInfo("zone.tab"_L1, &tzif))
+ return QTzTimeZoneHash();
-@@ -645,6 +649,9 @@
+ QTzTimeZoneHash zonesHash;
+@@ -91,6 +115,7 @@ static QTzTimeZoneHash loadTzTimeZones()
+ }
+ }
+
++ const QString path = tzif.fileName();
+ const qsizetype cut = path.lastIndexOf(u'/');
+ Q_ASSERT(cut > 0);
+ const QDir zoneDir = QDir(path.first(cut));
+@@ -761,20 +786,13 @@ QTzTimeZoneCacheEntry QTzTimeZoneCache::findEntry(const QByteArray &ianaId)
+ tzif.setFileName(QStringLiteral("/etc/localtime"));
if (!tzif.open(QIODevice::ReadOnly))
- return;
- } else {
-+ // Try TZDIR first, in case we're running on GuixSD.
-+ tzif.setFileName(QFile::decodeName(qgetenv("TZDIR")) + QStringLiteral("/") + QString::fromLocal8Bit(ianaId));
-+ if (!tzif.open(QIODevice::ReadOnly)) {
- // Open named tz, try modern path first, if fails try legacy path
- tzif.setFileName(QLatin1String("/usr/share/zoneinfo/") + QString::fromLocal8Bit(ianaId));
- if (!tzif.open(QIODevice::ReadOnly)) {
-@@ -652,6 +659,7 @@
- if (!tzif.open(QIODevice::ReadOnly))
- return;
+ return ret;
+- } else {
+- // Open named tz, try modern path first, if fails try legacy path
+- tzif.setFileName("/usr/share/zoneinfo/"_L1 + QString::fromLocal8Bit(ianaId));
+- if (!tzif.open(QIODevice::ReadOnly)) {
+- tzif.setFileName("/usr/lib/zoneinfo/"_L1 + QString::fromLocal8Bit(ianaId));
+- if (!tzif.open(QIODevice::ReadOnly)) {
+- // ianaId may be a POSIX rule, taken from $TZ or /etc/TZ
+- auto check = validatePosixRule(ianaId);
+- if (check.isValid) {
+- ret.m_hasDst = check.hasDst;
+- ret.m_posixRule = ianaId;
+- }
+- return ret;
+- }
++ } else if (!openZoneInfo(QString::fromLocal8Bit(ianaId), &tzif)) {
++ // ianaId may be a POSIX rule, taken from $TZ or /etc/TZ
++ auto check = validatePosixRule(ianaId);
++ if (check.isValid) {
++ ret.m_hasDst = check.hasDst;
++ ret.m_posixRule = ianaId;
++ return ret;
}
-+ }
}
- QDataStream ds(&tzif);
+@@ -1317,7 +1335,8 @@ private:
+ {
+ // On most distros /etc/localtime is a symlink to a real file so extract
+ // name from the path
+- const auto zoneinfo = "/zoneinfo/"_L1;
++ const QString tzdir = qEnvironmentVariable("TZDIR");
++ constexpr auto zoneinfo = "/zoneinfo/"_L1;
+ QString path = QStringLiteral("/etc/localtime");
+ long iteration = getSymloopMax();
+ // Symlink may point to another symlink etc. before being under zoneinfo/
+@@ -1325,9 +1344,15 @@ private:
+ // symlink, like America/Montreal pointing to America/Toronto
+ do {
+ path = QFile::symLinkTarget(path);
+- int index = path.indexOf(zoneinfo);
+- if (index >= 0) // Found zoneinfo file; extract zone name from path:
+- return QStringView{ path }.mid(index + zoneinfo.size()).toUtf8();
++ // If it's a zoneinfo file, extract the zone name from its path:
++ int index = tzdir.isEmpty() ? -1 : path.indexOf(tzdir);
++ if (index >= 0) {
++ const auto tail = QStringView{ path }.sliced(index + tzdir.size()).toUtf8();
++ return tail.startsWith(u'/') ? tail.sliced(1) : tail;
++ }
++ index = path.indexOf(zoneinfo);
++ if (index >= 0)
++ return QStringView{ path }.sliced(index + zoneinfo.size()).toUtf8();
+ } while (!path.isEmpty() && --iteration > 0);
+
+ return QByteArray();
+
+base-commit: af457a9f0f7eb1a2a7d11f495da508faab91a442
+--
+2.41.0
+
diff --git a/gnu/packages/patches/rust-1.70-fix-rustix-build.patch b/gnu/packages/patches/rust-1.70-fix-rustix-build.patch
new file mode 100644
index 0000000000..d72a8fb07a
--- /dev/null
+++ b/gnu/packages/patches/rust-1.70-fix-rustix-build.patch
@@ -0,0 +1,21 @@
+@@ -0,0 +1,20 @@
+--- a/vendor/fd-lock/Cargo.toml 2023-05-31 14:44:48.000000000 -0700
++++ b/vendor/fd-lock/Cargo.toml 2023-07-14 21:19:34.637702319 -0700
+@@ -45,7 +45,7 @@
+
+ [target."cfg(unix)".dependencies.rustix]
+ version = "0.37.0"
+-features = ["fs"]
++features = ["fs", "cc"]
+
+ [target."cfg(windows)".dependencies.windows-sys]
+ version = "0.45.0"
+--- a/src/bootstrap/Cargo.lock 2023-07-11 20:32:40.000000000 -0700
++++ b/src/bootstrap/Cargo.lock 2023-07-14 22:41:53.269284713 -0700
+@@ -618,6 +618,7 @@
+ dependencies = [
+ "bitflags",
++ "cc",
+ "errno",
+ "io-lifetimes",
+ "libc",
diff --git a/gnu/packages/patches/rust-openssl-sys-no-vendor.patch b/gnu/packages/patches/rust-openssl-sys-no-vendor.patch
deleted file mode 100644
index 5872d4cf22..0000000000
--- a/gnu/packages/patches/rust-openssl-sys-no-vendor.patch
+++ /dev/null
@@ -1,32 +0,0 @@
---- openssl-sys-0.9.84/Cargo.toml.orig 2023-04-03 09:10:11.979197979 -0400
-+++ openssl-sys-0.9.84/Cargo.toml 2023-04-03 12:07:30.285315609 -0400
-@@ -31,10 +31,6 @@
- [package.metadata.pkg-config]
- openssl = "1.0.1"
-
--[dependencies.bssl-sys]
--version = "0.1.0"
--optional = true
--
- [dependencies.libc]
- version = "0.2"
-
-@@ -46,16 +42,12 @@
- [build-dependencies.cc]
- version = "1.0"
-
--[build-dependencies.openssl-src]
--version = "111"
--optional = true
--
- [build-dependencies.pkg-config]
- version = "0.3.9"
-
- [features]
--unstable_boringssl = ["bssl-sys"]
--vendored = ["openssl-src"]
-+unstable_boringssl = []
-+vendored = []
-
- [target."cfg(target_env = \"msvc\")".build-dependencies.vcpkg]
- version = "0.2.8"
diff --git a/gnu/packages/patches/teuchos-remove-duplicate-using.patch b/gnu/packages/patches/teuchos-remove-duplicate-using.patch
new file mode 100644
index 0000000000..55cd3f6fc3
--- /dev/null
+++ b/gnu/packages/patches/teuchos-remove-duplicate-using.patch
@@ -0,0 +1,34 @@
+commit e27d9ae98502626d8407045a4e082797682ba56b
+Author: Christian Glusa <caglusa@sandia.gov>
+Date: Fri Jul 3 18:02:27 2020 -0600
+
+ Teuchos Comm test: remove duplicate 'using ...'
+
+diff --git a/packages/teuchos/comm/test/Comm/reduce.cpp b/packages/teuchos/comm/test/Comm/reduce.cpp
+index c05ebc0dc34..eac1975ef83 100644
+--- a/packages/teuchos/comm/test/Comm/reduce.cpp
++++ b/packages/teuchos/comm/test/Comm/reduce.cpp
+@@ -53,9 +53,6 @@ bool
+ testReduceSum (bool& success, std::ostream& out,
+ const int root, const Teuchos::Comm<int>& comm)
+ {
+-#ifdef HAVE_TEUCHOS_MPI
+- using Teuchos::MpiComm;
+-#endif // HAVE_TEUCHOS_MPI
+ using Teuchos::reduce;
+ using Teuchos::TypeNameTraits;
+ using std::endl;
+diff --git a/packages/teuchos/comm/test/Comm/scatter.cpp b/packages/teuchos/comm/test/Comm/scatter.cpp
+index 0ca961d2846..001009029c4 100644
+--- a/packages/teuchos/comm/test/Comm/scatter.cpp
++++ b/packages/teuchos/comm/test/Comm/scatter.cpp
+@@ -53,9 +53,6 @@ bool
+ testScatter (bool& success, std::ostream& out,
+ const int root, const Teuchos::Comm<int>& comm)
+ {
+-#ifdef HAVE_TEUCHOS_MPI
+- using Teuchos::MpiComm;
+-#endif // HAVE_TEUCHOS_MPI
+ using Teuchos::scatter;
+ using Teuchos::TypeNameTraits;
+ using std::endl;
diff --git a/gnu/packages/patches/tootle-glib-object-naming.patch b/gnu/packages/patches/tootle-glib-object-naming.patch
deleted file mode 100644
index 08ee23dd8f..0000000000
--- a/gnu/packages/patches/tootle-glib-object-naming.patch
+++ /dev/null
@@ -1,66 +0,0 @@
-From 0816105028c26965e37c9afc7c598854f3fecde1 Mon Sep 17 00:00:00 2001
-From: Clayton Craft <clayton@craftyguy.net>
-Date: Tue, 26 Oct 2021 15:03:25 -0700
-Subject: [PATCH] Adhere to GLib.Object naming conventions for properties
-
-Vala now validates property names against GLib.Object conventions, this
-fixes a compilation error as a result of this enforcement:
-
-../src/API/Status.vala:27.5-27.23: error: Name `_url' is not valid for a GLib.Object property
- public string? _url { get; set; }
- ^^^^^^^^^^^^^^^^^^^
-
-Relevant Vala change:
-https://gitlab.gnome.org/GNOME/vala/-/commit/38d61fbff037687ea4772e6df85c7e22a74b335e
-
-fixes #337
-
-Signed-off-by: Clayton Craft <clayton@craftyguy.net>
----
- src/API/Attachment.vala | 6 +++---
- src/API/Status.vala | 8 ++++----
- 2 files changed, 7 insertions(+), 7 deletions(-)
-
-diff --git a/src/API/Attachment.vala b/src/API/Attachment.vala
-index 88bc5bb..35c4018 100644
---- a/src/API/Attachment.vala
-+++ b/src/API/Attachment.vala
-@@ -4,10 +4,10 @@ public class Tootle.API.Attachment : Entity, Widgetizable {
- public string kind { get; set; default = "unknown"; }
- public string url { get; set; }
- public string? description { get; set; }
-- public string? _preview_url { get; set; }
-+ private string? t_preview_url { get; set; }
- public string? preview_url {
-- set { this._preview_url = value; }
-- get { return (this._preview_url == null || this._preview_url == "") ? url : _preview_url; }
-+ set { this.t_preview_url = value; }
-+ get { return (this.t_preview_url == null || this.t_preview_url == "") ? url : t_preview_url; }
- }
-
- public File? source_file { get; set; }
-diff --git a/src/API/Status.vala b/src/API/Status.vala
-index 4f92cdb..00e8a9f 100644
---- a/src/API/Status.vala
-+++ b/src/API/Status.vala
-@@ -28,16 +28,16 @@ public class Tootle.API.Status : Entity, Widgetizable {
- public ArrayList<API.Mention>? mentions { get; set; default = null; }
- public ArrayList<API.Attachment>? media_attachments { get; set; default = null; }
-
-- public string? _url { get; set; }
-+ private string? t_url { get; set; }
- public string url {
- owned get { return this.get_modified_url (); }
-- set { this._url = value; }
-+ set { this.t_url = value; }
- }
- string get_modified_url () {
-- if (this._url == null) {
-+ if (this.t_url == null) {
- return this.uri.replace ("/activity", "");
- }
-- return this._url;
-+ return this.t_url;
- }
-
- public Status formal {
diff --git a/gnu/packages/patches/tootle-reason-phrase.patch b/gnu/packages/patches/tootle-reason-phrase.patch
deleted file mode 100644
index 72a1d1ecfa..0000000000
--- a/gnu/packages/patches/tootle-reason-phrase.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-From 858ee78fbebe161a4cdd707a469dc0f045211a51 Mon Sep 17 00:00:00 2001
-From: Max Harmathy <harmathy@mailbox.org>
-Date: Wed, 25 Aug 2021 13:05:58 +0200
-Subject: [PATCH] Use reason_phrase instead of get_phrase
-
----
- src/Services/Cache.vala | 2 +-
- src/Services/Network.vala | 7 +------
- 2 files changed, 2 insertions(+), 7 deletions(-)
-
-diff --git a/src/Services/Cache.vala b/src/Services/Cache.vala
-index 2251697..2ed314e 100644
---- a/src/Services/Cache.vala
-+++ b/src/Services/Cache.vala
-@@ -88,7 +88,7 @@ public class Tootle.Cache : GLib.Object {
- try {
- var code = msg.status_code;
- if (code != Soup.Status.OK) {
-- var error = network.describe_error (code);
-+ var error = msg.reason_phrase;
- throw new Oopsie.INSTANCE (@"Server returned $error");
- }
-
-diff --git a/src/Services/Network.vala b/src/Services/Network.vala
-index fa2839c..d0143b0 100644
---- a/src/Services/Network.vala
-+++ b/src/Services/Network.vala
-@@ -56,7 +56,7 @@ public class Tootle.Network : GLib.Object {
- else if (status == Soup.Status.CANCELLED)
- debug ("Message is cancelled. Ignoring callback invocation.");
- else
-- ecb ((int32) status, describe_error ((int32) status));
-+ ecb ((int32) status, msg.reason_phrase);
- });
- }
- catch (Error e) {
-@@ -65,11 +65,6 @@ public class Tootle.Network : GLib.Object {
- }
- }
-
-- public string describe_error (uint code) {
-- var reason = Soup.Status.get_phrase (code);
-- return @"$code: $reason";
-- }
--
- public void on_error (int32 code, string message) {
- warning (message);
- app.toast (message);
diff --git a/gnu/packages/patches/tpetra-remove-duplicate-using.patch b/gnu/packages/patches/tpetra-remove-duplicate-using.patch
new file mode 100644
index 0000000000..e9cb2c6472
--- /dev/null
+++ b/gnu/packages/patches/tpetra-remove-duplicate-using.patch
@@ -0,0 +1,18 @@
+commit 919ceb0acbf4c6bc4f463433504a338c643612c2
+Author: Karen D. Devine <kddevin@sandia.gov>
+Date: Tue Feb 2 13:49:13 2021 -0700
+
+ tpetra: removed duplicate using statement #8673
+
+diff --git a/packages/tpetra/core/src/Tpetra_Details_FixedHashTable_def.hpp b/packages/tpetra/core/src/Tpetra_Details_FixedHashTable_def.hpp
+index a76f78c41a8..69d4dc46d03 100644
+--- a/packages/tpetra/core/src/Tpetra_Details_FixedHashTable_def.hpp
++++ b/packages/tpetra/core/src/Tpetra_Details_FixedHashTable_def.hpp
+@@ -1094,7 +1094,6 @@ init (const keys_type& keys,
+
+ // Allocate the array of (key,value) pairs. Don't fill it with
+ // zeros, because we will fill it with actual data below.
+- using Kokkos::ViewAllocateWithoutInitializing;
+ typedef typename val_type::non_const_type nonconst_val_type;
+ nonconst_val_type val (ViewAllocateWithoutInitializing ("Tpetra::FixedHashTable::pairs"),
+ theNumKeys);
diff --git a/gnu/packages/patches/u-boot-patman-change-id.patch b/gnu/packages/patches/u-boot-patman-change-id.patch
new file mode 100644
index 0000000000..354aee2755
--- /dev/null
+++ b/gnu/packages/patches/u-boot-patman-change-id.patch
@@ -0,0 +1,232 @@
+Upstream status: https://patchwork.ozlabs.org/project/uboot/patch/20231013030633.7191-1-maxim.cournoyer@gmail.com/
+
+From f83a5e07b0934e38cbee923e0c5b7fc0a890926c Mon Sep 17 00:00:00 2001
+From: Maxim Cournoyer <maxim.cournoyer@gmail.com>
+Date: Thu, 12 Oct 2023 17:04:25 -0400
+Subject: [PATCH] patman: Add a 'keep_change_id' setting
+
+Change-Ids can be useful for traceability purposes, and some projects
+may wish to have them preserved. This change makes it configurable
+via a new 'keep_change_id' setting.
+
+Series-version: 2
+Series-changes: 2
+- Add missing argument to send parser
+---
+ tools/patman/__main__.py | 2 ++
+ tools/patman/control.py | 12 +++++++++---
+ tools/patman/patchstream.py | 17 ++++++++++++-----
+ tools/patman/patman.rst | 11 ++++++-----
+ tools/patman/test_checkpatch.py | 16 ++++++++++++++++
+ 5 files changed, 45 insertions(+), 13 deletions(-)
+
+diff --git a/tools/patman/__main__.py b/tools/patman/__main__.py
+index 8eba5d3486..197ac1aad1 100755
+--- a/tools/patman/__main__.py
++++ b/tools/patman/__main__.py
+@@ -103,6 +103,8 @@ send.add_argument('--no-signoff', action='store_false', dest='add_signoff',
+ default=True, help="Don't add Signed-off-by to patches")
+ send.add_argument('--smtp-server', type=str,
+ help="Specify the SMTP server to 'git send-email'")
++send.add_argument('--keep-change-id', action='store_true',
++ help='Preserve Change-Id tags in patches to send.')
+
+ send.add_argument('patchfiles', nargs='*')
+
+diff --git a/tools/patman/control.py b/tools/patman/control.py
+index 916ddf8fcf..b292da9dc2 100644
+--- a/tools/patman/control.py
++++ b/tools/patman/control.py
+@@ -16,11 +16,14 @@ from patman import gitutil
+ from patman import patchstream
+ from u_boot_pylib import terminal
+
++
+ def setup():
+ """Do required setup before doing anything"""
+ gitutil.setup()
+
+-def prepare_patches(col, branch, count, start, end, ignore_binary, signoff):
++
++def prepare_patches(col, branch, count, start, end, ignore_binary, signoff,
++ keep_change_id=False):
+ """Figure out what patches to generate, then generate them
+
+ The patch files are written to the current directory, e.g. 0001_xxx.patch
+@@ -35,6 +38,7 @@ def prepare_patches(col, branch, count, start, end, ignore_binary, signoff):
+ end (int): End patch to use (0=last one in series, 1=one before that,
+ etc.)
+ ignore_binary (bool): Don't generate patches for binary files
++ keep_change_id (bool): Preserve the Change-Id tag.
+
+ Returns:
+ Tuple:
+@@ -59,11 +63,12 @@ def prepare_patches(col, branch, count, start, end, ignore_binary, signoff):
+ branch, start, to_do, ignore_binary, series, signoff)
+
+ # Fix up the patch files to our liking, and insert the cover letter
+- patchstream.fix_patches(series, patch_files)
++ patchstream.fix_patches(series, patch_files, keep_change_id)
+ if cover_fname and series.get('cover'):
+ patchstream.insert_cover_letter(cover_fname, series, to_do)
+ return series, cover_fname, patch_files
+
++
+ def check_patches(series, patch_files, run_checkpatch, verbose, use_tree):
+ """Run some checks on a set of patches
+
+@@ -166,7 +171,8 @@ def send(args):
+ col = terminal.Color()
+ series, cover_fname, patch_files = prepare_patches(
+ col, args.branch, args.count, args.start, args.end,
+- args.ignore_binary, args.add_signoff)
++ args.ignore_binary, args.add_signoff,
++ keep_change_id=args.keep_change_id)
+ ok = check_patches(series, patch_files, args.check_patch,
+ args.verbose, args.check_patch_use_tree)
+
+diff --git a/tools/patman/patchstream.py b/tools/patman/patchstream.py
+index f91669a940..e2e2a83e67 100644
+--- a/tools/patman/patchstream.py
++++ b/tools/patman/patchstream.py
+@@ -68,6 +68,7 @@ STATE_PATCH_SUBJECT = 1 # In patch subject (first line of log for a commit)
+ STATE_PATCH_HEADER = 2 # In patch header (after the subject)
+ STATE_DIFFS = 3 # In the diff part (past --- line)
+
++
+ class PatchStream:
+ """Class for detecting/injecting tags in a patch or series of patches
+
+@@ -76,7 +77,7 @@ class PatchStream:
+ unwanted tags or inject additional ones. These correspond to the two
+ phases of processing.
+ """
+- def __init__(self, series, is_log=False):
++ def __init__(self, series, is_log=False, keep_change_id=False):
+ self.skip_blank = False # True to skip a single blank line
+ self.found_test = False # Found a TEST= line
+ self.lines_after_test = 0 # Number of lines found after TEST=
+@@ -86,6 +87,7 @@ class PatchStream:
+ self.section = [] # The current section...END section
+ self.series = series # Info about the patch series
+ self.is_log = is_log # True if indent like git log
++ self.keep_change_id = keep_change_id # True to keep Change-Id tags
+ self.in_change = None # Name of the change list we are in
+ self.change_version = 0 # Non-zero if we are in a change list
+ self.change_lines = [] # Lines of the current change
+@@ -452,6 +454,8 @@ class PatchStream:
+
+ # Detect Change-Id tags
+ elif change_id_match:
++ if self.keep_change_id:
++ out = [line]
+ value = change_id_match.group(1)
+ if self.is_log:
+ if self.commit.change_id:
+@@ -763,7 +767,7 @@ def get_metadata_for_test(text):
+ pst.finalise()
+ return series
+
+-def fix_patch(backup_dir, fname, series, cmt):
++def fix_patch(backup_dir, fname, series, cmt, keep_change_id=False):
+ """Fix up a patch file, by adding/removing as required.
+
+ We remove our tags from the patch file, insert changes lists, etc.
+@@ -776,6 +780,7 @@ def fix_patch(backup_dir, fname, series, cmt):
+ fname (str): Filename to patch file to process
+ series (Series): Series information about this patch set
+ cmt (Commit): Commit object for this patch file
++ keep_change_id (bool): Keep the Change-Id tag.
+
+ Return:
+ list: A list of errors, each str, or [] if all ok.
+@@ -783,7 +788,7 @@ def fix_patch(backup_dir, fname, series, cmt):
+ handle, tmpname = tempfile.mkstemp()
+ outfd = os.fdopen(handle, 'w', encoding='utf-8')
+ infd = open(fname, 'r', encoding='utf-8')
+- pst = PatchStream(series)
++ pst = PatchStream(series, keep_change_id=keep_change_id)
+ pst.commit = cmt
+ pst.process_stream(infd, outfd)
+ infd.close()
+@@ -795,7 +800,7 @@ def fix_patch(backup_dir, fname, series, cmt):
+ shutil.move(tmpname, fname)
+ return cmt.warn
+
+-def fix_patches(series, fnames):
++def fix_patches(series, fnames, keep_change_id=False):
+ """Fix up a list of patches identified by filenames
+
+ The patch files are processed in place, and overwritten.
+@@ -803,6 +808,7 @@ def fix_patches(series, fnames):
+ Args:
+ series (Series): The Series object
+ fnames (:type: list of str): List of patch files to process
++ keep_change_id (bool): Keep the Change-Id tag.
+ """
+ # Current workflow creates patches, so we shouldn't need a backup
+ backup_dir = None #tempfile.mkdtemp('clean-patch')
+@@ -811,7 +817,8 @@ def fix_patches(series, fnames):
+ cmt = series.commits[count]
+ cmt.patch = fname
+ cmt.count = count
+- result = fix_patch(backup_dir, fname, series, cmt)
++ result = fix_patch(backup_dir, fname, series, cmt,
++ keep_change_id=keep_change_id)
+ if result:
+ print('%d warning%s for %s:' %
+ (len(result), 's' if len(result) > 1 else '', fname))
+diff --git a/tools/patman/patman.rst b/tools/patman/patman.rst
+index 038b651ee8..a8b317eed6 100644
+--- a/tools/patman/patman.rst
++++ b/tools/patman/patman.rst
+@@ -371,11 +371,12 @@ Series-process-log: sort, uniq
+ Separate each tag with a comma.
+
+ Change-Id:
+- This tag is stripped out but is used to generate the Message-Id
+- of the emails that will be sent. When you keep the Change-Id the
+- same you are asserting that this is a slightly different version
+- (but logically the same patch) as other patches that have been
+- sent out with the same Change-Id.
++ This tag is used to generate the Message-Id of the emails that
++ will be sent. When you keep the Change-Id the same you are
++ asserting that this is a slightly different version (but logically
++ the same patch) as other patches that have been sent out with the
++ same Change-Id. The Change-Id tag line is removed from outgoing
++ patches, unless the `keep_change_id` setting is set to `True`.
+
+ Various other tags are silently removed, like these Chrome OS and
+ Gerrit tags::
+diff --git a/tools/patman/test_checkpatch.py b/tools/patman/test_checkpatch.py
+index a8bb364e42..59a53ef8ca 100644
+--- a/tools/patman/test_checkpatch.py
++++ b/tools/patman/test_checkpatch.py
+@@ -160,6 +160,22 @@ Signed-off-by: Simon Glass <sjg@chromium.org>
+
+ rc = os.system('diff -u %s %s' % (inname, expname))
+ self.assertEqual(rc, 0)
++ os.remove(inname)
++
++ # Test whether the keep_change_id settings works.
++ inhandle, inname = tempfile.mkstemp()
++ infd = os.fdopen(inhandle, 'w', encoding='utf-8')
++ infd.write(data)
++ infd.close()
++
++ patchstream.fix_patch(None, inname, series.Series(), com,
++ keep_change_id=True)
++
++ with open(inname, 'r') as f:
++ content = f.read()
++ self.assertIn(
++ 'Change-Id: I80fe1d0c0b7dd10aa58ce5bb1d9290b6664d5413',
++ content)
+
+ os.remove(inname)
+ os.remove(expname)
+
+base-commit: f9a47ac8d97da2b3aaf463f268a9a872a8d921df
+--
+2.41.0
+
diff --git a/gnu/packages/patches/unison-fix-ocaml-4.08.patch b/gnu/packages/patches/unison-fix-ocaml-4.08.patch
deleted file mode 100644
index 811f590721..0000000000
--- a/gnu/packages/patches/unison-fix-ocaml-4.08.patch
+++ /dev/null
@@ -1,81 +0,0 @@
-This patch is taken from the opam repository:
-https://github.com/ocaml/opam-repository/blob/master/packages/unison/unison.2.51.2/files/ocaml48.patch
-
-It fixes compatibility with changes introduced in OCaml 4.08.
-
-diff --git a/src/Makefile.OCaml b/src/Makefile.OCaml
-index 7cefa2e..378fc8b 100644
---- a/src/Makefile.OCaml
-+++ b/src/Makefile.OCaml
-@@ -272,7 +272,7 @@ endif
-
- # Gtk GUI
- ifeq ($(UISTYLE), gtk)
-- CAMLFLAGS+=-I +lablgtk
-+ CAMLFLAGS+=-I $(LABLGTKLIB)
- OCAMLOBJS+=pixmaps.cmo uigtk.cmo linkgtk.cmo
- OCAMLLIBS+=lablgtk.cma
- endif
-@@ -282,7 +282,7 @@ OCAMLFIND := $(shell command -v ocamlfind 2> /dev/null)
-
- ifeq ($(UISTYLE), gtk2)
- ifndef OCAMLFIND
-- CAMLFLAGS+=-I +lablgtk2
-+ CAMLFLAGS+=-I $(LABLGTK2LIB)
- else
- CAMLFLAGS+=$(shell $(OCAMLFIND) query -i-format lablgtk2 )
- endif
-diff --git a/src/files.ml b/src/files.ml
-index 5ff1881..1d1fbcc 100644
---- a/src/files.ml
-+++ b/src/files.ml
-@@ -734,7 +734,7 @@ let get_files_in_directory dir =
- with End_of_file ->
- dirh.System.closedir ()
- end;
-- Sort.list (<) !files
-+ List.sort String.compare !files
-
- let ls dir pattern =
- Util.convertUnixErrorsToTransient
-diff --git a/src/recon.ml b/src/recon.ml
-index 2c619bb..2412c18 100644
---- a/src/recon.ml
-+++ b/src/recon.ml
-@@ -661,8 +661,8 @@ let rec reconcile
-
- (* Sorts the paths so that they will be displayed in order *)
- let sortPaths pathUpdatesList =
-- Sort.list
-- (fun (p1, _) (p2, _) -> Path.compare p1 p2 <= 0)
-+ List.sort
-+ Path.compare
- pathUpdatesList
-
- let rec enterPath p1 p2 t =
-diff --git a/src/system/system_generic.ml b/src/system/system_generic.ml
-index 453027d..c2288b8 100755
---- a/src/system/system_generic.ml
-+++ b/src/system/system_generic.ml
-@@ -47,7 +47,7 @@ let open_out_gen = open_out_gen
- let chmod = Unix.chmod
- let chown = Unix.chown
- let utimes = Unix.utimes
--let link = Unix.link
-+let link s d = Unix.link s d
- let openfile = Unix.openfile
- let opendir f =
- let h = Unix.opendir f in
-diff --git a/src/uigtk2.ml b/src/uigtk2.ml
-index fbc5d8f..4e82cc2 100644
---- a/src/uigtk2.ml
-+++ b/src/uigtk2.ml
-@@ -94,7 +94,7 @@ let icon =
- let icon =
- let p = GdkPixbuf.create ~width:48 ~height:48 ~has_alpha:true () in
- Gpointer.blit
-- (Gpointer.region_of_string Pixmaps.icon_data) (GdkPixbuf.get_pixels p);
-+ (Gpointer.region_of_bytes Pixmaps.icon_data) (GdkPixbuf.get_pixels p);
- p
-
- let leftPtrWatch =