-rw-r--r--  nixos/doc/manual/from_md/release-notes/rl-2205.section.xml | 27
-rw-r--r--  nixos/doc/manual/release-notes/rl-2205.section.md | 11
-rw-r--r--  nixos/modules/services/matrix/matrix-synapse.xml | 6
-rw-r--r--  nixos/modules/services/networking/pleroma.nix | 8
-rw-r--r--  nixos/modules/services/security/oauth2_proxy.nix | 10
-rw-r--r--  nixos/modules/services/security/tor.nix | 5
-rw-r--r--  nixos/modules/services/x11/display-managers/default.nix | 19
-rw-r--r--  nixos/tests/all-tests.nix | 2
-rw-r--r--  nixos/tests/pleroma.nix | 20
-rw-r--r--  nixos/tests/terminal-emulators.nix | 207
-rw-r--r--  nixos/tests/web-apps/mastodon.nix | 170
-rw-r--r--  nixos/tests/wine.nix | 13
-rw-r--r--  pkgs/applications/audio/cider/default.nix | 30
-rw-r--r--  pkgs/applications/audio/eteroj.lv2/default.nix | 23
-rw-r--r--  pkgs/applications/audio/lv2lint/default.nix | 22
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/eteroj.nix | 10
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/generic.nix | 37
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/jit.nix | 12
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/mephisto.nix | 17
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/midi_matrix.nix | 10
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/moony.nix | 13
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/orbit.nix | 12
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/patchmatrix.nix | 13
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/router.nix | 11
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/sherlock.nix | 12
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/synthpod.nix | 13
-rw-r--r--  pkgs/applications/audio/open-music-kontrollers/vm.nix | 10
-rw-r--r--  pkgs/applications/audio/patchmatrix/default.nix | 45
-rw-r--r--  pkgs/applications/audio/pt2-clone/default.nix | 4
-rw-r--r--  pkgs/applications/audio/pyradio/default.nix | 4
-rw-r--r--  pkgs/applications/audio/vocal/default.nix | 8
-rw-r--r--  pkgs/applications/blockchains/alfis/default.nix | 6
-rw-r--r--  pkgs/applications/blockchains/chia/default.nix | 5
-rw-r--r--  pkgs/applications/blockchains/polkadot/default.nix | 6
-rw-r--r--  pkgs/applications/editors/cudatext/default.nix | 4
-rw-r--r--  pkgs/applications/editors/cudatext/deps.json | 24
-rw-r--r--  pkgs/applications/emulators/vice/default.nix | 120
-rw-r--r--  pkgs/applications/emulators/wine/sources.nix | 6
-rw-r--r--  pkgs/applications/emulators/wine/vkd3d.nix | 8
-rw-r--r--  pkgs/applications/graphics/hydrus/default.nix | 6
-rw-r--r--  pkgs/applications/kde/konsole.nix | 4
-rw-r--r--  pkgs/applications/misc/appeditor/default.nix | 5
-rw-r--r--  pkgs/applications/misc/blender/default.nix | 31
-rw-r--r--  pkgs/applications/misc/regextester/default.nix | 1
-rw-r--r--  pkgs/applications/misc/tootle/default.nix | 5
-rw-r--r--  pkgs/applications/networking/browsers/palemoon/default.nix | 32
-rw-r--r--  pkgs/applications/networking/browsers/palemoon/mozconfig | 4
-rw-r--r--  pkgs/applications/networking/cawbird/default.nix | 4
-rw-r--r--  pkgs/applications/networking/cluster/fn-cli/default.nix | 4
-rw-r--r--  pkgs/applications/networking/cluster/werf/default.nix | 6
-rw-r--r--  pkgs/applications/networking/nextcloud-client/default.nix | 4
-rw-r--r--  pkgs/applications/networking/sync/rclone/default.nix | 9
-rw-r--r--  pkgs/applications/office/bookworm/default.nix | 1
-rw-r--r--  pkgs/applications/office/spice-up/default.nix | 1
-rw-r--r--  pkgs/applications/office/zotero/default.nix | 86
-rw-r--r--  pkgs/applications/science/logic/isabelle/default.nix | 5
-rw-r--r--  pkgs/applications/science/logic/naproche/default.nix | 38
-rw-r--r--  pkgs/applications/science/math/nasc/default.nix | 1
-rw-r--r--  pkgs/applications/science/math/qalculate-gtk/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/alacritty/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/contour/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/cool-retro-term/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/ctx/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/darktile/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/eterm/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/germinal/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/guake/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/hyper/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/kermit-terminal/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/kgx/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/kitty/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/lxterminal/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/mlterm/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/mrxvt/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/roxterm/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/rxvt-unicode/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/rxvt-unicode/wrapper.nix | 6
-rw-r--r--  pkgs/applications/terminal-emulators/sakura/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/st/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/stupidterm/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/terminator/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/termite/default.nix | 7
-rw-r--r--  pkgs/applications/terminal-emulators/termonad/default.nix | 4
-rw-r--r--  pkgs/applications/terminal-emulators/tilda/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/tilix/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/wayst/default.nix | 3
-rw-r--r--  pkgs/applications/terminal-emulators/wezterm/default.nix | 9
-rw-r--r--  pkgs/applications/terminal-emulators/xterm/default.nix | 5
-rw-r--r--  pkgs/applications/version-management/gitlab/data.json | 12
-rw-r--r--  pkgs/applications/version-management/gitlab/gitaly/default.nix | 4
-rw-r--r--  pkgs/applications/version-management/gitlab/gitlab-workhorse/default.nix | 2
-rw-r--r--  pkgs/applications/version-management/meld/default.nix | 11
-rw-r--r--  pkgs/applications/virtualization/distrobox/default.nix | 4
-rw-r--r--  pkgs/applications/virtualization/docker/default.nix | 12
-rw-r--r--  pkgs/applications/virtualization/singularity/default.nix | 4
-rw-r--r--  pkgs/applications/window-managers/i3/wsr.nix | 6
-rw-r--r--  pkgs/data/themes/yaru/default.nix | 4
-rw-r--r--  pkgs/desktops/enlightenment/terminology/default.nix | 4
-rw-r--r--  pkgs/desktops/gnome/core/gnome-terminal/default.nix | 3
-rw-r--r--  pkgs/desktops/lxqt/qterminal/default.nix | 3
-rw-r--r--  pkgs/desktops/mate/mate-terminal/default.nix | 4
-rw-r--r--  pkgs/desktops/xfce/applications/xfce4-terminal/default.nix | 4
-rw-r--r--  pkgs/development/haskell-modules/make-package-set.nix | 16
-rw-r--r--  pkgs/development/haskell-modules/package-list.nix | 8
-rw-r--r--  pkgs/development/libraries/exiv2/default.nix | 1
-rw-r--r--  pkgs/development/libraries/gdal/default.nix | 4
-rw-r--r--  pkgs/development/libraries/libqalculate/default.nix | 4
-rw-r--r--  pkgs/development/libraries/mysocketw/default.nix | 27
-rw-r--r--  pkgs/development/libraries/science/math/suitesparse-graphblas/default.nix | 4
-rw-r--r--  pkgs/development/libraries/vte/default.nix | 4
-rw-r--r--  pkgs/development/libraries/yder/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/ailment/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/aiobotocore/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/aioridwell/default.nix | 10
-rw-r--r--  pkgs/development/python-modules/angr/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/angrop/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/archinfo/default.nix | 14
-rw-r--r--  pkgs/development/python-modules/argon2_cffi/default.nix | 3
-rw-r--r--  pkgs/development/python-modules/azure-core/default.nix | 15
-rw-r--r--  pkgs/development/python-modules/broadlink/default.nix | 11
-rw-r--r--  pkgs/development/python-modules/claripy/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/cle/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/cloudsmith-api/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/coqui-trainer/default.nix | 14
-rw-r--r--  pkgs/development/python-modules/discogs-client/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/elementpath/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/gruut-ipa/default.nix | 17
-rw-r--r--  pkgs/development/python-modules/gruut/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/hahomematic/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/invoke/default.nix | 18
-rw-r--r--  pkgs/development/python-modules/limnoria/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/mkdocs-material/default.nix | 41
-rw-r--r--  pkgs/development/python-modules/mkdocs-material/mkdocs-material-extensions.nix | 24
-rw-r--r--  pkgs/development/python-modules/myfitnesspal/default.nix | 9
-rw-r--r--  pkgs/development/python-modules/ocrmypdf/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pdfminer_six/default.nix | 13
-rw-r--r--  pkgs/development/python-modules/pyaussiebb/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/pysaml2/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/python-crfsuite/default.nix | 2
-rw-r--r--  pkgs/development/python-modules/pyvex/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/qiling/default.nix | 14
-rw-r--r--  pkgs/development/python-modules/ropper/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/scikit-hep-testdata/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/scrapy/default.nix | 22
-rw-r--r--  pkgs/development/python-modules/typed-settings/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/types-pytz/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/types-requests/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/types-tabulate/default.nix | 4
-rw-r--r--  pkgs/development/python-modules/wrf-python/default.nix | 32
-rw-r--r--  pkgs/development/quickemu/default.nix | 5
-rw-r--r--  pkgs/development/quickemu/efi_vars_ensure_writable.patch | 13
-rw-r--r--  pkgs/development/quickemu/input_overrides.patch | 19
-rw-r--r--  pkgs/development/tools/allure/default.nix | 39
-rw-r--r--  pkgs/development/tools/analysis/flow/default.nix | 4
-rw-r--r--  pkgs/development/tools/cask/default.nix | 16
-rw-r--r--  pkgs/development/tools/circup/default.nix | 2
-rw-r--r--  pkgs/development/tools/cloud-nuke/default.nix | 4
-rw-r--r--  pkgs/development/tools/continuous-integration/github-runner/default.nix | 4
-rw-r--r--  pkgs/development/tools/continuous-integration/github-runner/deps.nix | 3
-rw-r--r--  pkgs/development/tools/doctl/default.nix | 4
-rw-r--r--  pkgs/development/tools/ko/default.nix | 9
-rw-r--r--  pkgs/development/tools/ktlint/default.nix | 4
-rw-r--r--  pkgs/development/tools/misc/terraform-ls/default.nix | 6
-rw-r--r--  pkgs/development/tools/misc/terraformer/default.nix | 6
-rw-r--r--  pkgs/development/tools/pulumictl/default.nix | 4
-rw-r--r--  pkgs/development/tools/rover/default.nix | 56
-rw-r--r--  pkgs/development/tools/rover/librusty_v8.nix | 17
-rw-r--r--  pkgs/development/tools/rover/schema/etag.id | 1
-rw-r--r--  pkgs/development/tools/rover/schema/schema.graphql | 172
-rwxr-xr-x  pkgs/development/tools/rover/update.sh | 96
-rw-r--r--  pkgs/development/tools/sentry-cli/default.nix | 6
-rw-r--r--  pkgs/development/tools/skaffold/default.nix | 6
-rw-r--r--  pkgs/development/tools/toml2json/default.nix | 20
-rw-r--r--  pkgs/development/tools/wrangler/default.nix | 6
-rw-r--r--  pkgs/development/tools/yarn/default.nix | 4
-rw-r--r--  pkgs/development/tools/yq-go/default.nix | 6
-rw-r--r--  pkgs/games/rare/default.nix | 64
-rw-r--r--  pkgs/os-specific/linux/pflask/default.nix | 18
-rw-r--r--  pkgs/os-specific/linux/smem/default.nix | 4
-rw-r--r--  pkgs/servers/atlassian/jira.nix | 4
-rw-r--r--  pkgs/servers/dns/knot-resolver/default.nix | 4
-rw-r--r--  pkgs/servers/imgproxy/default.nix | 6
-rw-r--r--  pkgs/servers/jackett/default.nix | 4
-rw-r--r--  pkgs/servers/jackett/deps.nix | 8
-rw-r--r--  pkgs/servers/mastodon/default.nix | 4
-rw-r--r--  pkgs/servers/misc/virtiofsd/default.nix | 6
-rw-r--r--  pkgs/servers/monitoring/prometheus/apcupsd-exporter.nix | 8
-rw-r--r--  pkgs/servers/monitoring/prometheus/wireguard-exporter.nix | 6
-rw-r--r--  pkgs/servers/pleroma/default.nix | 31
-rw-r--r--  pkgs/servers/sabnzbd/default.nix | 4
-rw-r--r--  pkgs/servers/sql/materialize/default.nix | 11
-rw-r--r--  pkgs/servers/tailscale/default.nix | 4
-rw-r--r--  pkgs/servers/web-apps/wordpress/default.nix | 4
-rw-r--r--  pkgs/test/haskell/shellFor/default.nix | 11
-rw-r--r--  pkgs/tools/X11/jumpapp/default.nix | 4
-rw-r--r--  pkgs/tools/admin/exoscale-cli/default.nix | 4
-rw-r--r--  pkgs/tools/admin/trinsic-cli/default.nix | 4
-rw-r--r--  pkgs/tools/filesystems/squashfs/darwin.patch | 36
-rw-r--r--  pkgs/tools/filesystems/squashfs/default.nix | 25
-rw-r--r--  pkgs/tools/misc/broadlink-cli/default.nix | 4
-rw-r--r--  pkgs/tools/misc/diffoscope/default.nix | 11
-rw-r--r--  pkgs/tools/misc/dua/default.nix | 6
-rw-r--r--  pkgs/tools/misc/hashit/default.nix | 1
-rw-r--r--  pkgs/tools/misc/onefetch/default.nix | 11
-rw-r--r--  pkgs/tools/misc/opentelemetry-collector/contrib.nix | 6
-rw-r--r--  pkgs/tools/misc/steampipe/default.nix | 6
-rw-r--r--  pkgs/tools/networking/corerad/default.nix | 10
-rw-r--r--  pkgs/tools/networking/openapi-generator-cli/unstable.nix | 6
-rw-r--r--  pkgs/tools/networking/tinyssh/default.nix | 4
-rw-r--r--  pkgs/tools/security/minio-certgen/default.nix | 4
-rw-r--r--  pkgs/tools/text/snippetpixie/default.nix | 2
-rw-r--r--  pkgs/tools/text/vale/default.nix | 6
-rw-r--r--  pkgs/tools/wayland/swaykbdd/default.nix | 4
-rw-r--r--  pkgs/top-level/aliases.nix | 2
-rw-r--r--  pkgs/top-level/all-packages.nix | 32
-rw-r--r--  pkgs/top-level/python-packages.nix | 6
216 files changed, 2123 insertions, 644 deletions
diff --git a/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml b/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml
index 48e85b1a5e7..348374026b4 100644
--- a/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml
+++ b/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml
@@ -468,6 +468,12 @@
           freeform type.
         </para>
         <para>
+          The <literal>listeners.*.bind_address</literal> option was
+          renamed to <literal>bind_addresses</literal> in order to match
+          the upstream <literal>homeserver.yaml</literal> option name.
+          It is now also a list of strings instead of a string.
+        </para>
+        <para>
           An example to make the required migration clearer:
         </para>
         <para>
@@ -528,7 +534,7 @@
 
       listeners = [ {
         port = 8448;
-        bind_address = [
+        bind_addresses = [
           &quot;::&quot;
           &quot;0.0.0.0&quot;
         ];
@@ -559,7 +565,14 @@
           Additionally a few option defaults have been synced up with
           upstream default values, for example the
           <literal>max_upload_size</literal> grew from
-          <literal>10M</literal> to <literal>50M</literal>.
+          <literal>10M</literal> to <literal>50M</literal>. For the same
+          reason, the default <literal>media_store_path</literal> was
+          changed from <literal>${dataDir}/media</literal> to
+          <literal>${dataDir}/media_store</literal> if
+          <literal>system.stateVersion</literal> is at least
+          <literal>22.05</literal>. Files will need to be manually moved
+          to the new location if the <literal>stateVersion</literal> is
+          updated.
         </para>
       </listitem>
       <listitem>
@@ -825,6 +838,16 @@
       </listitem>
       <listitem>
         <para>
+          The Tor SOCKS proxy is now actually disabled if
+          <literal>services.tor.client.enable</literal> is set to
+          <literal>false</literal> (the default). If you are using this
+          functionality but didn’t change the setting or set it to
+          <literal>false</literal>, you now need to set it to
+          <literal>true</literal>.
+        </para>
+      </listitem>
+      <listitem>
+        <para>
           The terraform 0.12 compatibility has been removed and the
           <literal>terraform.withPlugins</literal> and
           <literal>terraform-providers.mkProvider</literal>
diff --git a/nixos/doc/manual/release-notes/rl-2205.section.md b/nixos/doc/manual/release-notes/rl-2205.section.md
index 2c2008ba123..37ff778dd9b 100644
--- a/nixos/doc/manual/release-notes/rl-2205.section.md
+++ b/nixos/doc/manual/release-notes/rl-2205.section.md
@@ -158,6 +158,9 @@ In addition to numerous new and upgraded packages, this release has the followin
   module (`services.matrix-synapse`) now need to be moved into `services.matrix-synapse.settings`. And while not all options you
   may use are defined in there, they are still supported, because you can set arbitrary values in this freeform type.
 
+  The `listeners.*.bind_address` option was renamed to `bind_addresses` in order to match the upstream `homeserver.yaml` option
+  name. It is now also a list of strings instead of a string.
+
   An example to make the required migration clearer:
 
   Before:
@@ -215,7 +218,7 @@ In addition to numerous new and upgraded packages, this release has the followin
 
         listeners = [ {
           port = 8448;
-          bind_address = [
+          bind_addresses = [
             "::"
             "0.0.0.0"
           ];
@@ -240,7 +243,9 @@ In addition to numerous new and upgraded packages, this release has the followin
 
   The secrets in your original config should be migrated into a YAML file that is included via `extraConfigFiles`.
 
-  Additionally a few option defaults have been synced up with upstream default values, for example the `max_upload_size` grew from `10M` to `50M`.
+  Additionally a few option defaults have been synced up with upstream default values, for example the `max_upload_size` grew from `10M` to `50M`. For the same reason, the default
+  `media_store_path` was changed from `${dataDir}/media` to `${dataDir}/media_store` if `system.stateVersion` is at least `22.05`. Files will need to be manually moved to the new
+  location if the `stateVersion` is updated.
 
 - The MoinMoin wiki engine (`services.moinmoin`) has been removed, because Python 2 is being retired from nixpkgs.
 
@@ -319,6 +324,8 @@ In addition to numerous new and upgraded packages, this release has the followin
 
 - `systemd-nspawn@.service` settings have been reverted to the default systemd behaviour. User namespaces are now activated by default. If you want to keep running nspawn containers without user namespaces you need to set `systemd.nspawn.<name>.execConfig.PrivateUsers = false`
 
+- The Tor SOCKS proxy is now actually disabled if `services.tor.client.enable` is set to `false` (the default). If you are using this functionality but didn't change the setting or set it to `false`, you now need to set it to `true`.
+
 - The terraform 0.12 compatibility has been removed and the `terraform.withPlugins` and `terraform-providers.mkProvider` implementations simplified. Providers now need to be stored under
 `$out/libexec/terraform-providers/<registry>/<owner>/<name>/<version>/<os>_<arch>/terraform-provider-<name>_v<version>` (which mkProvider does).
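Returning to the Synapse changes described earlier in these notes, a minimal sketch of the migrated options, assuming the module's default dataDir of /var/lib/matrix-synapse (paths and listener values are illustrative, not a complete configuration):

  services.matrix-synapse.settings = {
    listeners = [ {
      port = 8448;
      # plural name and a list of strings (was the singular string `bind_address`)
      bind_addresses = [ "::" "0.0.0.0" ];
    } ];
    # optional: pin the pre-22.05 media location instead of letting the new
    # ${dataDir}/media_store default take effect on stateVersion >= 22.05
    media_store_path = "/var/lib/matrix-synapse/media";
  };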
 
diff --git a/nixos/modules/services/matrix/matrix-synapse.xml b/nixos/modules/services/matrix/matrix-synapse.xml
index cdc4b4de1a7..cf33957d58e 100644
--- a/nixos/modules/services/matrix/matrix-synapse.xml
+++ b/nixos/modules/services/matrix/matrix-synapse.xml
@@ -119,7 +119,7 @@ in {
     <link linkend="opt-services.matrix-synapse.settings.listeners">listeners</link> = [
       {
         <link linkend="opt-services.matrix-synapse.settings.listeners._.port">port</link> = 8008;
-        <link linkend="opt-services.matrix-synapse.settings.listeners._.bind_addresses">bind_address</link> = [ "::1" ];
+        <link linkend="opt-services.matrix-synapse.settings.listeners._.bind_addresses">bind_addresses</link> = [ "::1" ];
         <link linkend="opt-services.matrix-synapse.settings.listeners._.type">type</link> = "http";
         <link linkend="opt-services.matrix-synapse.settings.listeners._.tls">tls</link> = false;
         <link linkend="opt-services.matrix-synapse.settings.listeners._.x_forwarded">x_forwarded</link> = true;
@@ -152,10 +152,10 @@ in {
 
   <para>
    If you want to run a server with public registration by anybody, you can
-   then enable <literal><link linkend="opt-services.matrix-synapse.settings.enable_registration">services.matrix-synapse.enable_registration</link> =
+   then enable <literal><link linkend="opt-services.matrix-synapse.settings.enable_registration">services.matrix-synapse.settings.enable_registration</link> =
    true;</literal>. Otherwise, or you can generate a registration secret with
    <command>pwgen -s 64 1</command> and set it with
-   <option><link linkend="opt-services.matrix-synapse.settings.registration_shared_secret">services.matrix-synapse.registration_shared_secret</link></option>.
+   <option><link linkend="opt-services.matrix-synapse.settings.registration_shared_secret">services.matrix-synapse.settings.registration_shared_secret</link></option>.
    To create a new user or admin, run the following after you have set the secret
    and have rebuilt NixOS:
 <screen>
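As a quick companion to the corrected option paths above, a sketch of the two registration approaches the text describes (the secret is a placeholder; per the release notes, real secrets are better kept out of the Nix store via extraConfigFiles):

  # anyone may register:
  services.matrix-synapse.settings.enable_registration = true;

  # or: keep registration closed and use a shared secret (pwgen -s 64 1)
  services.matrix-synapse.settings.registration_shared_secret = "<generated secret>";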
diff --git a/nixos/modules/services/networking/pleroma.nix b/nixos/modules/services/networking/pleroma.nix
index 9b8382392c0..c6d4c14dcb7 100644
--- a/nixos/modules/services/networking/pleroma.nix
+++ b/nixos/modules/services/networking/pleroma.nix
@@ -1,6 +1,7 @@
 { config, options, lib, pkgs, stdenv, ... }:
 let
   cfg = config.services.pleroma;
+  cookieFile = "/var/lib/pleroma/.cookie";
 in {
   options = {
     services.pleroma = with lib; {
@@ -8,7 +9,7 @@ in {
 
       package = mkOption {
         type = types.package;
-        default = pkgs.pleroma;
+        default = pkgs.pleroma.override { inherit cookieFile; };
         defaultText = literalExpression "pkgs.pleroma";
         description = "Pleroma package to use.";
       };
@@ -100,7 +101,6 @@ in {
       after = [ "network-online.target" "postgresql.service" ];
       wantedBy = [ "multi-user.target" ];
       restartTriggers = [ config.environment.etc."/pleroma/config.exs".source ];
-      environment.RELEASE_COOKIE = "/var/lib/pleroma/.cookie";
       serviceConfig = {
         User = cfg.user;
         Group = cfg.group;
@@ -118,10 +118,10 @@ in {
         # Better be safe than sorry migration-wise.
         ExecStartPre =
           let preScript = pkgs.writers.writeBashBin "pleromaStartPre" ''
-            if [ ! -f /var/lib/pleroma/.cookie ]
+            if [ ! -f "${cookieFile}" ] || [ ! -s "${cookieFile}" ]
             then
               echo "Creating cookie file"
-              dd if=/dev/urandom bs=1 count=16 | hexdump -e '16/1 "%02x"' > /var/lib/pleroma/.cookie
+              dd if=/dev/urandom bs=1 count=16 | ${pkgs.hexdump}/bin/hexdump -e '16/1 "%02x"' > "${cookieFile}"
             fi
             ${cfg.package}/bin/pleroma_ctl migrate
           '';
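A minimal sketch of what this wiring implies for a configuration that overrides the package, assuming pkgs.pleroma accepts the cookieFile argument this change introduces:

  services.pleroma.package = pkgs.pleroma.override {
    # must match the path the ExecStartPre script above writes the Erlang cookie to
    cookieFile = "/var/lib/pleroma/.cookie";
  };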
diff --git a/nixos/modules/services/security/oauth2_proxy.nix b/nixos/modules/services/security/oauth2_proxy.nix
index 4d356242417..ce295bd4ba3 100644
--- a/nixos/modules/services/security/oauth2_proxy.nix
+++ b/nixos/modules/services/security/oauth2_proxy.nix
@@ -102,17 +102,19 @@ in
     # Taken from: https://github.com/oauth2-proxy/oauth2-proxy/blob/master/providers/providers.go
     provider = mkOption {
       type = types.enum [
-        "google"
+        "adfs"
         "azure"
+        "bitbucket"
+        "digitalocean"
         "facebook"
         "github"
-        "keycloak"
         "gitlab"
+        "google"
+        "keycloak"
+        "keycloak-oidc"
         "linkedin"
         "login.gov"
-        "bitbucket"
         "nextcloud"
-        "digitalocean"
         "oidc"
       ];
       default = "google";
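A minimal usage sketch for the extended provider enum above (identifiers are placeholders; the provider-specific options are unchanged by this commit and omitted):

  services.oauth2_proxy = {
    enable = true;
    provider = "keycloak-oidc";  # one of the newly accepted enum values
  };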
diff --git a/nixos/modules/services/security/tor.nix b/nixos/modules/services/security/tor.nix
index ddd216ca7fd..a5822c02794 100644
--- a/nixos/modules/services/security/tor.nix
+++ b/nixos/modules/services/security/tor.nix
@@ -910,6 +910,11 @@ in
         ORPort = mkForce [];
         PublishServerDescriptor = mkForce false;
       })
+      (mkIf (!cfg.client.enable) {
+        # Make sure application connections via SOCKS are disabled
+        # when services.tor.client.enable is false
+        SOCKSPort = mkForce [ 0 ];
+      })
       (mkIf cfg.client.enable (
         { SOCKSPort = [ cfg.client.socksListenAddress ];
         } // optionalAttrs cfg.client.transparentProxy.enable {
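A minimal sketch of what users relying on the SOCKS proxy now need to set explicitly, per the release note above:

  services.tor = {
    enable = true;
    # the SOCKS listener used to stay open even with this at its default of false
    client.enable = true;
  };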
diff --git a/nixos/modules/services/x11/display-managers/default.nix b/nixos/modules/services/x11/display-managers/default.nix
index 03fe68fe505..a5db3dd5dd4 100644
--- a/nixos/modules/services/x11/display-managers/default.nix
+++ b/nixos/modules/services/x11/display-managers/default.nix
@@ -219,24 +219,7 @@ in
 
       session = mkOption {
         default = [];
-        type = with types; listOf (submodule ({ ... }: {
-          options = {
-            manage = mkOption {
-              description = "Whether this is a desktop or a window manager";
-              type = enum [ "desktop" "window" ];
-            };
-
-            name = mkOption {
-              description = "Name of this session";
-              type = str;
-            };
-
-            start = mkOption {
-              description = "Commands to run to start this session";
-              type = lines;
-            };
-          };
-        }));
+        type = types.listOf types.attrs;
         example = literalExpression
           ''
             [ { manage = "desktop";
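Since the submodule type above is replaced by a plain list of attribute sets, a sketch of an entry carrying the same three keys the removed submodule documented (session name and start commands are illustrative):

  services.xserver.displayManager.session = [
    {
      manage = "desktop";   # or "window"
      name = "my-session";  # hypothetical name
      start = ''
        ${pkgs.xterm}/bin/xterm -ls &
        waitPID=$!
      '';
    }
  ];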
diff --git a/nixos/tests/all-tests.nix b/nixos/tests/all-tests.nix
index b2b35119ce3..474bb423379 100644
--- a/nixos/tests/all-tests.nix
+++ b/nixos/tests/all-tests.nix
@@ -286,6 +286,7 @@ in
   mailhog = handleTest ./mailhog.nix {};
   man = handleTest ./man.nix {};
   mariadb-galera = handleTest ./mysql/mariadb-galera.nix {};
+  mastodon = handleTestOn ["x86_64-linux" "i686-linux" "aarch64-linux"] ./web-apps/mastodon.nix {};
   matomo = handleTest ./matomo.nix {};
   matrix-appservice-irc = handleTest ./matrix-appservice-irc.nix {};
   matrix-conduit = handleTest ./matrix-conduit.nix {};
@@ -521,6 +522,7 @@ in
   telegraf = handleTest ./telegraf.nix {};
   teleport = handleTest ./teleport.nix {};
   thelounge = handleTest ./thelounge.nix {};
+  terminal-emulators = handleTest ./terminal-emulators.nix {};
   tiddlywiki = handleTest ./tiddlywiki.nix {};
   tigervnc = handleTest ./tigervnc.nix {};
   timezone = handleTest ./timezone.nix {};
diff --git a/nixos/tests/pleroma.nix b/nixos/tests/pleroma.nix
index bf3623fce38..90a9a251104 100644
--- a/nixos/tests/pleroma.nix
+++ b/nixos/tests/pleroma.nix
@@ -32,8 +32,7 @@ import ./make-test-python.nix ({ pkgs, ... }:
     # system one. Overriding this pretty bad default behaviour.
     export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
 
-    export TOOT_LOGIN_CLI_PASSWORD="jamy-password"
-    toot login_cli -i "pleroma.nixos.test" -e "jamy@nixos.test"
+    echo "jamy-password" | toot login_cli -i "pleroma.nixos.test" -e "jamy@nixos.test"
     echo "Login OK"
 
     # Send a toot then verify it's part of the public timeline
@@ -168,21 +167,6 @@ import ./make-test-python.nix ({ pkgs, ... }:
     cp key.pem cert.pem $out
   '';
 
-  /* Toot is preventing users from feeding login_cli a password non
-     interactively. While it makes sense most of the times, it's
-     preventing us to login in this non-interactive test. This patch
-     introduce a TOOT_LOGIN_CLI_PASSWORD env variable allowing us to
-     provide a password to toot login_cli
-
-     If https://github.com/ihabunek/toot/pull/180 gets merged at some
-     point, feel free to remove this patch. */
-  custom-toot = pkgs.toot.overrideAttrs(old:{
-    patches = [ (pkgs.fetchpatch {
-      url = "https://github.com/NinjaTrappeur/toot/commit/b4a4c30f41c0cb7e336714c2c4af9bc9bfa0c9f2.patch";
-      sha256 = "sha256-0xxNwjR/fStLjjUUhwzCCfrghRVts+fc+fvVJqVcaFg=";
-    }) ];
-  });
-
   hosts = nodes: ''
     ${nodes.pleroma.config.networking.primaryIPAddress} pleroma.nixos.test
     ${nodes.client.config.networking.primaryIPAddress} client.nixos.test
@@ -194,7 +178,7 @@ import ./make-test-python.nix ({ pkgs, ... }:
       security.pki.certificateFiles = [ "${tls-cert}/cert.pem" ];
       networking.extraHosts = hosts nodes;
       environment.systemPackages = with pkgs; [
-        custom-toot
+        toot
         send-toot
       ];
     };
diff --git a/nixos/tests/terminal-emulators.nix b/nixos/tests/terminal-emulators.nix
new file mode 100644
index 00000000000..60161b80b96
--- /dev/null
+++ b/nixos/tests/terminal-emulators.nix
@@ -0,0 +1,207 @@
+# Terminal emulators all present a pretty similar interface.
+# That gives us an opportunity to easily test their basic functionality with a single codebase.
+#
+# There are two tests run on each terminal emulator
+# - can it successfully execute a command passed on the cmdline?
+# - can it successfully display a colour?
+# the latter is used as a proxy for "can it display text?", without going through all the intricacies of OCR.
+#
+# 256-colour terminal mode is used to display the test colour, since it has a universally-applicable palette (unlike 8- and 16- colour, where the colours are implementation-defined), and it is widely supported (unlike 24-bit colour).
+#
+# Future work:
+# - Wayland support (both for testing the existing terminals, and for testing wayland-only terminals like foot and havoc)
+# - Test keyboard input? (skipped for now, to eliminate the possibility of race conditions and focus issues)
+
+{ system ? builtins.currentSystem,
+  config ? {},
+  pkgs ? import ../.. { inherit system config; }
+}:
+
+with import ../lib/testing-python.nix { inherit system pkgs; };
+with pkgs.lib;
+
+let tests = {
+      alacritty.pkg = p: p.alacritty;
+
+      contour.pkg = p: p.contour;
+      contour.cmd = "contour $command";
+
+      cool-retro-term.pkg = p: p.cool-retro-term;
+      cool-retro-term.colourTest = false; # broken by gloss effect
+
+      ctx.pkg = p: p.ctx;
+      ctx.pinkValue = "#FE0065";
+
+      darktile.pkg = p: p.darktile;
+
+      eterm.pkg = p: p.eterm;
+      eterm.executable = "Eterm";
+      eterm.pinkValue = "#D40055";
+
+      germinal.pkg = p: p.germinal;
+
+      gnome-terminal.pkg = p: p.gnome.gnome-terminal;
+
+      guake.pkg = p: p.guake;
+      guake.cmd = "SHELL=$command guake --show";
+      guake.kill = true;
+
+      hyper.pkg = p: p.hyper;
+
+      kermit.pkg = p: p.kermit-terminal;
+
+      kgx.pkg = p: p.kgx;
+      kgx.cmd = "kgx -e $command";
+      kgx.kill = true;
+
+      kitty.pkg = p: p.kitty;
+      kitty.cmd = "kitty $command";
+
+      konsole.pkg = p: p.plasma5Packages.konsole;
+
+      lxterminal.pkg = p: p.lxterminal;
+
+      mate-terminal.pkg = p: p.mate.mate-terminal;
+      mate-terminal.cmd = "SHELL=$command mate-terminal --disable-factory"; # factory mode uses dbus, and we don't have a proper dbus session set up
+
+      mlterm.pkg = p: p.mlterm;
+
+      mrxvt.pkg = p: p.mrxvt;
+
+      qterminal.pkg = p: p.lxqt.qterminal;
+      qterminal.kill = true;
+
+      roxterm.pkg = p: p.roxterm;
+      roxterm.cmd = "roxterm -e $command";
+
+      sakura.pkg = p: p.sakura;
+
+      st.pkg = p: p.st;
+      st.kill = true;
+
+      stupidterm.pkg = p: p.stupidterm;
+      stupidterm.cmd = "stupidterm -- $command";
+
+      terminator.pkg = p: p.terminator;
+      terminator.cmd = "terminator -e $command";
+
+      terminology.pkg = p: p.enlightenment.terminology;
+      terminology.cmd = "SHELL=$command terminology --no-wizard=true";
+      terminology.colourTest = false; # broken by gloss effect
+
+      termite.pkg = p: p.termite;
+
+      termonad.pkg = p: p.termonad;
+
+      tilda.pkg = p: p.tilda;
+
+      tilix.pkg = p: p.tilix;
+      tilix.cmd = "tilix -e $command";
+
+      urxvt.pkg = p: p.rxvt-unicode;
+
+      wayst.pkg = p: p.wayst;
+      wayst.pinkValue = "#FF0066";
+
+      wezterm.pkg = p: p.wezterm;
+
+      xfce4-terminal.pkg = p: p.xfce.xfce4-terminal;
+
+      xterm.pkg = p: p.xterm;
+    };
+in mapAttrs (name: { pkg, executable ? name, cmd ? "SHELL=$command ${executable}", colourTest ? true, pinkValue ? "#FF0087", kill ? false }: makeTest
+{
+  name = "terminal-emulator-${name}";
+  meta = with pkgs.stdenv.lib.maintainers; {
+    maintainers = [ jjjollyjim ];
+  };
+
+  machine = { pkgsInner, ... }:
+
+  {
+    imports = [ ./common/x11.nix ./common/user-account.nix ];
+
+    # Hyper (and any other electron-based terminals) won't run as root
+    test-support.displayManager.auto.user = "alice";
+
+    environment.systemPackages = [
+      (pkg pkgs)
+      (pkgs.writeShellScriptBin "report-success" ''
+        echo 1 > /tmp/term-ran-successfully
+        ${optionalString kill "pkill ${executable}"}
+      '')
+      (pkgs.writeShellScriptBin "display-colour" ''
+        # A 256-colour background colour code for pink, then spaces.
+        #
+        # Background is used rather than foreground to minimize the effect of anti-aliasing.
+        #
+        # Keep adding more in case the window is partially offscreen to the left or requires
+        # a change to correctly redraw after initialising the window (as with ctx).
+
+        while :
+        do
+            echo -ne "\e[48;5;198m                   "
+            sleep 0.5
+        done
+        sleep infinity
+      '')
+      (pkgs.writeShellScriptBin "run-in-this-term" "sudo -u alice run-in-this-term-wrapped $1")
+
+      (pkgs.writeShellScriptBin "run-in-this-term-wrapped" "command=\"$(which \"$1\")\"; ${cmd}")
+    ];
+
+    # Helpful reminder to add this test to passthru.tests
+    warnings = if !((pkg pkgs) ? "passthru" && (pkg pkgs).passthru ? "tests") then [ "The package for ${name} doesn't have a passthru.tests" ] else [ ];
+  };
+
+  # We need imagemagick, though not tesseract
+  enableOCR = true;
+
+  testScript = { nodes, ... }: let
+  in ''
+    with subtest("wait for x"):
+        start_all()
+        machine.wait_for_x()
+
+    with subtest("have the terminal run a command"):
+        # We run this command synchronously, so we can be certain the exit codes are happy
+        machine.${if kill then "execute" else "succeed"}("run-in-this-term report-success")
+        machine.wait_for_file("/tmp/term-ran-successfully")
+    ${optionalString colourTest ''
+
+    import tempfile
+    import subprocess
+
+
+    def check_for_pink(final=False) -> bool:
+        with tempfile.NamedTemporaryFile() as tmpin:
+            machine.send_monitor_command("screendump {}".format(tmpin.name))
+
+            cmd = 'convert {} -define histogram:unique-colors=true -format "%c" histogram:info:'.format(
+                tmpin.name
+            )
+            ret = subprocess.run(cmd, shell=True, capture_output=True)
+            if ret.returncode != 0:
+                raise Exception(
+                    "image analysis failed with exit code {}".format(ret.returncode)
+                )
+
+            text = ret.stdout.decode("utf-8")
+            return "${pinkValue}" in text
+
+
+    with subtest("ensuring no pink is present without the terminal"):
+        assert (
+            check_for_pink() == False
+        ), "Pink was present on the screen before we even launched a terminal!"
+
+    with subtest("have the terminal display a colour"):
+        # We run this command in the background
+        machine.shell.send(b"(run-in-this-term display-colour |& systemd-cat -t terminal) &\n")
+
+        with machine.nested("Waiting for the screen to have pink on it:"):
+            retry(check_for_pink)
+  ''}'';
+}
+
+  ) tests
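A sketch of how another terminal could be added to the tests attrset above, using a hypothetical my-term package; only pkg is required, the remaining keys override the defaults in the mapAttrs signature:

  my-term.pkg = p: p.my-term;           # hypothetical package attribute
  my-term.cmd = "my-term -e $command";  # only needed if plain `SHELL=$command my-term` is not enough
  my-term.pinkValue = "#FF0087";        # override if the terminal renders the test colour slightly differently
  my-term.kill = true;                  # set if the window must be killed after the command reports success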
diff --git a/nixos/tests/web-apps/mastodon.nix b/nixos/tests/web-apps/mastodon.nix
new file mode 100644
index 00000000000..279a1c59169
--- /dev/null
+++ b/nixos/tests/web-apps/mastodon.nix
@@ -0,0 +1,170 @@
+import ../make-test-python.nix ({pkgs, ...}:
+let
+  test-certificates = pkgs.runCommandLocal "test-certificates" { } ''
+    mkdir -p $out
+    echo insecure-root-password > $out/root-password-file
+    echo insecure-intermediate-password > $out/intermediate-password-file
+    ${pkgs.step-cli}/bin/step certificate create "Example Root CA" $out/root_ca.crt $out/root_ca.key --password-file=$out/root-password-file --profile root-ca
+    ${pkgs.step-cli}/bin/step certificate create "Example Intermediate CA 1" $out/intermediate_ca.crt $out/intermediate_ca.key --password-file=$out/intermediate-password-file --ca-password-file=$out/root-password-file --profile intermediate-ca --ca $out/root_ca.crt --ca-key $out/root_ca.key
+  '';
+
+  hosts = ''
+    192.168.2.10 ca.local
+    192.168.2.11 mastodon.local
+  '';
+
+in
+{
+  name = "mastodon";
+  meta.maintainers = with pkgs.lib.maintainers; [ erictapen izorkin ];
+
+  nodes = {
+    ca = { pkgs, ... }: {
+      networking = {
+        interfaces.eth1 = {
+          ipv4.addresses = [
+            { address = "192.168.2.10"; prefixLength = 24; }
+          ];
+        };
+        extraHosts = hosts;
+      };
+      services.step-ca = {
+        enable = true;
+        address = "0.0.0.0";
+        port = 8443;
+        openFirewall = true;
+        intermediatePasswordFile = "${test-certificates}/intermediate-password-file";
+        settings = {
+          dnsNames = [ "ca.local" ];
+          root = "${test-certificates}/root_ca.crt";
+          crt = "${test-certificates}/intermediate_ca.crt";
+          key = "${test-certificates}/intermediate_ca.key";
+          db = {
+            type = "badger";
+            dataSource = "/var/lib/step-ca/db";
+          };
+          authority = {
+            provisioners = [
+              {
+                type = "ACME";
+                name = "acme";
+              }
+            ];
+          };
+        };
+      };
+    };
+
+    server = { pkgs, ... }: {
+      networking = {
+        interfaces.eth1 = {
+          ipv4.addresses = [
+            { address = "192.168.2.11"; prefixLength = 24; }
+          ];
+        };
+        extraHosts = hosts;
+        firewall.allowedTCPPorts = [ 80 443 ];
+      };
+
+      security = {
+        acme = {
+          acceptTerms = true;
+          defaults.server = "https://ca.local:8443/acme/acme/directory";
+          defaults.email = "mastodon@mastodon.local";
+        };
+        pki.certificateFiles = [ "${test-certificates}/root_ca.crt" ];
+      };
+
+      services.redis.servers.mastodon = {
+        enable = true;
+        bind = "127.0.0.1";
+        port = 31637;
+      };
+
+      services.mastodon = {
+        enable = true;
+        configureNginx = true;
+        localDomain = "mastodon.local";
+        enableUnixSocket = false;
+        redis = {
+          createLocally = true;
+          host = "127.0.0.1";
+          port = 31637;
+        };
+        database = {
+          createLocally = true;
+          host = "/run/postgresql";
+          port = 5432;
+        };
+        smtp = {
+          createLocally = false;
+          fromAddress = "mastodon@mastodon.local";
+        };
+        extraConfig = {
+          EMAIL_DOMAIN_ALLOWLIST = "example.com";
+        };
+      };
+    };
+
+    client = { pkgs, ... }: {
+      environment.systemPackages = [ pkgs.jq ];
+      networking = {
+        interfaces.eth1 = {
+          ipv4.addresses = [
+            { address = "192.168.2.12"; prefixLength = 24; }
+          ];
+        };
+        extraHosts = hosts;
+      };
+
+      security = {
+        pki.certificateFiles = [ "${test-certificates}/root_ca.crt" ];
+      };
+    };
+  };
+
+  testScript = ''
+    start_all()
+
+    ca.wait_for_unit("step-ca.service")
+    ca.wait_for_open_port(8443)
+
+    server.wait_for_unit("nginx.service")
+    server.wait_for_unit("redis-mastodon.service")
+    server.wait_for_unit("postgresql.service")
+    server.wait_for_unit("mastodon-sidekiq.service")
+    server.wait_for_unit("mastodon-streaming.service")
+    server.wait_for_unit("mastodon-web.service")
+    server.wait_for_open_port(55000)
+    server.wait_for_open_port(55001)
+
+    # Check Mastodon version from remote client
+    client.succeed("curl --fail https://mastodon.local/api/v1/instance | jq -r '.version' | grep '${pkgs.mastodon.version}'")
+
+    # Check using admin CLI
+    # Check Mastodon version
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl version' | grep '${pkgs.mastodon.version}'")
+
+    # Manage accounts
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl email_domain_blocks add example.com'")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl email_domain_blocks list' | grep 'example.com'")
+    server.fail("su - mastodon -s /bin/sh -c 'mastodon-env tootctl email_domain_blocks list' | grep 'mastodon.local'")
+    server.fail("su - mastodon -s /bin/sh -c 'mastodon-env tootctl accounts create alice --email=alice@example.com'")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl email_domain_blocks remove example.com'")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl accounts create bob --email=bob@example.com'")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl accounts approve bob'")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl accounts delete bob'")
+
+    # Manage IP access
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl ip_blocks add 192.168.0.0/16 --severity=no_access'")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl ip_blocks export' | grep '192.168.0.0/16'")
+    server.fail("su - mastodon -s /bin/sh -c 'mastodon-env tootctl p_blocks export' | grep '172.16.0.0/16'")
+    client.fail("curl --fail https://mastodon.local/about")
+    server.succeed("su - mastodon -s /bin/sh -c 'mastodon-env tootctl ip_blocks remove 192.168.0.0/16'")
+    client.succeed("curl --fail https://mastodon.local/about")
+
+    ca.shutdown()
+    server.shutdown()
+    client.shutdown()
+  '';
+})
diff --git a/nixos/tests/wine.nix b/nixos/tests/wine.nix
index cc449864c76..8135cb90a59 100644
--- a/nixos/tests/wine.nix
+++ b/nixos/tests/wine.nix
@@ -3,7 +3,7 @@
 }:
 
 let
-  inherit (pkgs.lib) concatMapStrings listToAttrs;
+  inherit (pkgs.lib) concatMapStrings listToAttrs optionals optionalString;
   inherit (import ../lib/testing-python.nix { inherit system pkgs; }) makeTest;
 
   hello32 = "${pkgs.pkgsCross.mingw32.hello}/bin/hello.exe";
@@ -27,6 +27,9 @@ let
               "bash -c 'wine ${exe} 2> >(tee wine-stderr >&2)'"
           )
           assert 'Hello, world!' in greeting
+        ''
+        # only the full version contains Gecko, but the error is not printed reliably in other variants
+        + optionalString (variant == "full") ''
           machine.fail(
               "fgrep 'Could not find Wine Gecko. HTML rendering will be disabled.' wine-stderr"
           )
@@ -37,5 +40,9 @@ let
 
   variants = [ "base" "full" "minimal" "staging" "unstable" "wayland" ];
 
-in listToAttrs (map (makeWineTest "winePackages" [ hello32 ]) variants
-  ++ map (makeWineTest "wineWowPackages" [ hello32 hello64 ]) variants)
+in
+listToAttrs (
+  map (makeWineTest "winePackages" [ hello32 ]) variants
+  ++ optionals pkgs.stdenv.is64bit
+    (map (makeWineTest "wineWowPackages" [ hello32 hello64 ]) variants)
+)
diff --git a/pkgs/applications/audio/cider/default.nix b/pkgs/applications/audio/cider/default.nix
new file mode 100644
index 00000000000..edab6d109ef
--- /dev/null
+++ b/pkgs/applications/audio/cider/default.nix
@@ -0,0 +1,30 @@
+{ appimageTools, lib, fetchurl }:
+
+appimageTools.wrapType2 rec {
+  pname = "cider";
+  version = "1.3.1308";
+
+  src = fetchurl {
+    url = "https://1308-429851205-gh.circle-artifacts.com/0/%7E/Cider/dist/artifacts/Cider-${version}.AppImage";
+    sha256 = "1lbyvn1c8155p039qfzx7jwad7km073phkmrzjm0w3ahdpwz3wgi";
+  };
+
+  extraInstallCommands =
+    let contents = appimageTools.extract { inherit pname version src; };
+    in ''
+      mv $out/bin/${pname}-${version} $out/bin/${pname}
+
+      install -m 444 -D ${contents}/${pname}.desktop -t $out/share/applications
+      substituteInPlace $out/share/applications/${pname}.desktop \
+        --replace 'Exec=AppRun' 'Exec=${pname}'
+      cp -r ${contents}/usr/share/icons $out/share
+    '';
+
+  meta = with lib; {
+    description = "A new look into listening and enjoying Apple Music in style and performance.";
+    homepage = "https://github.com/ciderapp/Cider";
+    license = licenses.agpl3;
+    maintainers = [ maintainers.cigrainger ];
+    platforms = [ "x86_64-linux" ];
+  };
+}
diff --git a/pkgs/applications/audio/eteroj.lv2/default.nix b/pkgs/applications/audio/eteroj.lv2/default.nix
deleted file mode 100644
index ee95ce43ff5..00000000000
--- a/pkgs/applications/audio/eteroj.lv2/default.nix
+++ /dev/null
@@ -1,23 +0,0 @@
-{ lib, stdenv, fetchFromGitHub, cmake, pkg-config, libuv, lv2 }:
-
-stdenv.mkDerivation rec {
-  pname = "eteroj.lv2";
-  version = "0.4.0";
-
-  src = fetchFromGitHub {
-    owner  = "OpenMusicKontrollers";
-    repo   = pname;
-    rev    = version;
-    sha256 = "0lzdk7hlz3vqgshrfpj0izjad1fmsnzk2vxqrry70xgz8xglvnmn";
-  };
-
-  buildInputs = [ libuv lv2 ];
-  nativeBuildInputs = [ cmake pkg-config ];
-
-  meta = with lib; {
-    description = "OSC injection/ejection from/to UDP/TCP/Serial for LV2";
-    homepage = "https://open-music-kontrollers.ch/lv2/eteroj";
-    license = licenses.artistic2;
-    maintainers = with maintainers; [ magnetophon ];
-  };
-}
diff --git a/pkgs/applications/audio/lv2lint/default.nix b/pkgs/applications/audio/lv2lint/default.nix
new file mode 100644
index 00000000000..ada996866c8
--- /dev/null
+++ b/pkgs/applications/audio/lv2lint/default.nix
@@ -0,0 +1,22 @@
+{ stdenv, lib, fetchurl, pkg-config, meson, ninja, lv2, lilv, curl, libelf }:
+
+stdenv.mkDerivation rec {
+  pname = "lv2lint";
+  version = "0.14.0";
+
+  src = fetchurl {
+    url = "https://git.open-music-kontrollers.ch/lv2/${pname}/snapshot/${pname}-${version}.tar.xz";
+    sha256 = "sha256-yPKM7RToLNBT+AXSjfxxpncESmv89/wcGCt//pnEGqI=";
+  };
+
+  nativeBuildInputs = [ pkg-config meson ninja ];
+  buildInputs = [ lv2 lilv curl libelf ];
+
+  meta = with lib; {
+    description = "Check whether a given LV2 plugin is up to the specification";
+    homepage = "https://open-music-kontrollers.ch/lv2/${pname}:";
+    license = licenses.artistic2;
+    maintainers = [ maintainers.magnetophon ];
+    platforms = platforms.all;
+  };
+}
diff --git a/pkgs/applications/audio/open-music-kontrollers/eteroj.nix b/pkgs/applications/audio/open-music-kontrollers/eteroj.nix
new file mode 100644
index 00000000000..60de97ca3d0
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/eteroj.nix
@@ -0,0 +1,10 @@
+{ callPackage, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "eteroj";
+  version = "0.10.0";
+
+  sha256 = "18iv1sdwm0g6b53shsylj6bf3svmvvy5xadhfsgb4xg39qr07djz";
+
+  description = "OSC injection/ejection from/to UDP/TCP/Serial for LV2";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/generic.nix b/pkgs/applications/audio/open-music-kontrollers/generic.nix
new file mode 100644
index 00000000000..a5ed7c41eb7
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/generic.nix
@@ -0,0 +1,37 @@
+{ stdenv, lib, fetchurl, pkg-config, meson, ninja, libGLU, lv2, serd, sord, libX11, libXext, glew, lv2lint
+, pname, version, sha256, description
+, url ? "https://git.open-music-kontrollers.ch/lv2/${pname}.lv2/snapshot/${pname}.lv2-${version}.tar.xz"
+, additionalBuildInputs ? []
+, postPatch ? ""
+, ...
+}:
+
+stdenv.mkDerivation {
+  inherit pname;
+
+  inherit version;
+
+  inherit postPatch;
+
+  src = fetchurl {
+    url = url;
+    sha256 = sha256;
+  };
+  nativeBuildInputs = [ pkg-config meson ninja ];
+  buildInputs = [
+    lv2
+    sord
+    libX11
+    libXext
+    glew
+    lv2lint
+  ] ++ additionalBuildInputs;
+
+  meta = with lib; {
+    description = description;
+    homepage = "https://open-music-kontrollers.ch/lv2/${pname}:";
+    license = licenses.artistic2;
+    maintainers = [ maintainers.magnetophon ];
+    platforms = platforms.all;
+  };
+}
diff --git a/pkgs/applications/audio/open-music-kontrollers/jit.nix b/pkgs/applications/audio/open-music-kontrollers/jit.nix
new file mode 100644
index 00000000000..937a6446a8d
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/jit.nix
@@ -0,0 +1,12 @@
+{ callPackage, lv2, fontconfig, libvterm-neovim, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "jit";
+  version = "unstable-2021-08-15";
+  url = "https://git.open-music-kontrollers.ch/lv2/${pname}.lv2/snapshot/${pname}.lv2-1f5d6935049fc0dd5a4dc257b84b36d2048f2d83.tar.xz";
+  sha256 = "sha256-XGICowVb0JgLJpn2h9GtViobYTdmo1LJ7/JFEyVsIqU=";
+
+  additionalBuildInputs = [ lv2 fontconfig libvterm-neovim ];
+
+  description = "A Just-in-Time C/Rust compiler embedded in an LV2 plugin";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/mephisto.nix b/pkgs/applications/audio/open-music-kontrollers/mephisto.nix
new file mode 100644
index 00000000000..de707edd3d9
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/mephisto.nix
@@ -0,0 +1,17 @@
+{ callPackage, faust, fontconfig, cmake, libvterm-neovim, libevdev, libglvnd, fira-code, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "mephisto";
+  version = "0.16.0";
+
+  sha256 = "0vgr3rsvdj4w0xpc5iqpvyqilk42wr9zs8bg26sfv3f2wi4hb6gx";
+
+  additionalBuildInputs = [ faust fontconfig cmake libvterm-neovim libevdev libglvnd fira-code ];
+
+  # see: https://github.com/OpenMusicKontrollers/mephisto.lv2/issues/6
+  postPatch = ''
+    sed -i 's/llvm-c-dsp/llvm-dsp-c/g' mephisto.c
+  '';
+
+  description = "A Just-in-time FAUST embedded in an LV2 plugin";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/midi_matrix.nix b/pkgs/applications/audio/open-music-kontrollers/midi_matrix.nix
new file mode 100644
index 00000000000..cd431d1a1d3
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/midi_matrix.nix
@@ -0,0 +1,10 @@
+{ callPackage, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "midi_matrix";
+  version = "0.30.0";
+
+  sha256 = "1nwmfxdzk4pvbwcgi3d7v4flqc10bmi2fxhrhrpfa7cafqs40ib6";
+
+  description = "An LV2 MIDI channel matrix patcher";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/moony.nix b/pkgs/applications/audio/open-music-kontrollers/moony.nix
new file mode 100644
index 00000000000..6087e055376
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/moony.nix
@@ -0,0 +1,13 @@
+{ callPackage, cairo, libvterm-neovim, robodoc, cmake, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "moony";
+  version = "0.40.0";
+
+  sha256 = "sha256-9a3gR3lV8xFFTDZD+fJPCALVztgmggzyIpsPZCOw/uY=";
+
+  additionalBuildInputs = [ cairo libvterm-neovim robodoc cmake ];
+
+  description = "Realtime Lua as programmable glue in LV2";
+
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/orbit.nix b/pkgs/applications/audio/open-music-kontrollers/orbit.nix
new file mode 100644
index 00000000000..930fc17760f
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/orbit.nix
@@ -0,0 +1,12 @@
+{ callPackage, zlib, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "orbit";
+  version = "unstable-2021-04-13";
+  url = "https://git.open-music-kontrollers.ch/lv2/${pname}.lv2/snapshot/${pname}.lv2-f4aa620fc8d77418856581a6a955192af15b3860.tar.xz";
+  sha256 = "0z8d8h2w8fb2zx84n697jvy32dc0vf60jyiyh4gm22prgr2dvgkc";
+
+  additionalBuildInputs = [ zlib ];
+
+  description = "An LV2 time event manipulation plugin bundle";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/patchmatrix.nix b/pkgs/applications/audio/open-music-kontrollers/patchmatrix.nix
new file mode 100644
index 00000000000..cc343e0330c
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/patchmatrix.nix
@@ -0,0 +1,13 @@
+{ callPackage, libjack2, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "patchmatrix";
+  version = "0.26.0";
+
+  url = "https://git.open-music-kontrollers.ch/lad/${pname}/snapshot/${pname}-${version}.tar.xz";
+  sha256 = "sha256-cqPHCnrAhHB6a0xmPUYOAsZfLsqnGpXEuGR1W6i6W7I=";
+
+  additionalBuildInputs = [ libjack2 ];
+
+  description = "A JACK patchbay in flow matrix style";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/router.nix b/pkgs/applications/audio/open-music-kontrollers/router.nix
new file mode 100644
index 00000000000..40d3a5e6fbc
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/router.nix
@@ -0,0 +1,11 @@
+{ callPackage, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "router";
+  version = "unstable-2021-04-13";
+
+  url = "https://git.open-music-kontrollers.ch/lv2/${pname}.lv2/snapshot/${pname}.lv2-7d754dd64c540d40b828166401617715dc235ca3.tar.xz";
+  sha256 = "sha256-LjaW5Xdxfjzd6IJ2ptHzmHt7fhU1HQo7ubZ4USVqRE8=";
+
+  description = "An atom/audio/CV router LV2 plugin bundle";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/sherlock.nix b/pkgs/applications/audio/open-music-kontrollers/sherlock.nix
new file mode 100644
index 00000000000..c8ae0afd09a
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/sherlock.nix
@@ -0,0 +1,12 @@
+{ callPackage, sratom, flex, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "sherlock";
+  version = "0.28.0";
+
+  sha256 = "07zj88s1593fpw2s0r3ix7cj2icfd9zyirsyhr2i8l6d30b6n6fb";
+
+  additionalBuildInputs = [ sratom flex ];
+
+  description = "Plugins for visualizing LV2 atom, MIDI and OSC events";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/synthpod.nix b/pkgs/applications/audio/open-music-kontrollers/synthpod.nix
new file mode 100644
index 00000000000..9b7964e839b
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/synthpod.nix
@@ -0,0 +1,13 @@
+{ callPackage, lilv, libjack2, alsa-lib, zita-alsa-pcmi, libxcb, xcbutilxrm, sratom, gtk2, qt5,  libvterm-neovim, robodoc, cmake,... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "synthpod";
+  version = "unstable-2021-10-22";
+
+  url = "https://git.open-music-kontrollers.ch/lv2/synthpod/snapshot/synthpod-6f284bdad882037a522c120af92b96d8abf2de60.tar.xz";
+  sha256 = "sha256-59WBlOKum5Pcmq2CfFfRHCNEa8uPCoBk0kSjFlIcypw=";
+
+  additionalBuildInputs = [ lilv libjack2 alsa-lib zita-alsa-pcmi libxcb xcbutilxrm sratom gtk2 qt5.qtbase qt5.wrapQtAppsHook libvterm-neovim robodoc cmake ];
+
+  description = "Lightweight Nonlinear LV2 Plugin Container";
+})
diff --git a/pkgs/applications/audio/open-music-kontrollers/vm.nix b/pkgs/applications/audio/open-music-kontrollers/vm.nix
new file mode 100644
index 00000000000..8d432d356cf
--- /dev/null
+++ b/pkgs/applications/audio/open-music-kontrollers/vm.nix
@@ -0,0 +1,10 @@
+{ callPackage, ... } @ args:
+
+callPackage ./generic.nix (args // rec {
+  pname = "vm";
+  version = "0.14.0";
+
+  sha256 = "013gq7jn556nkk1nq6zzh9nmp3fb36jd7ndzvyq3qryw7khzkagc";
+
+  description = "A programmable virtual machine LV2 plugin";
+})
diff --git a/pkgs/applications/audio/patchmatrix/default.nix b/pkgs/applications/audio/patchmatrix/default.nix
deleted file mode 100644
index a5d135ce9d4..00000000000
--- a/pkgs/applications/audio/patchmatrix/default.nix
+++ /dev/null
@@ -1,45 +0,0 @@
-{ stdenv
-, lib
-, fetchFromGitHub
-, libjack2
-, lv2
-, meson
-, ninja
-, pkg-config
-, glew
-, xorg
-}:
-
-stdenv.mkDerivation rec {
-  pname = "patchmatrix";
-  version = "0.26.0";
-
-  src = fetchFromGitHub {
-    owner = "OpenMusicKontrollers";
-    repo = pname;
-    rev = version;
-    hash = "sha256-rR3y5rGzmib//caPmhthvMelAdHRvV0lMRfvcj9kcCg=";
-  };
-
-  nativeBuildInputs = [
-    meson
-    ninja
-    pkg-config
-  ];
-
-  buildInputs = [
-    glew
-    libjack2
-    lv2
-    xorg.libX11
-    xorg.libXext
-  ];
-
-  meta = with lib; {
-    description = "A JACK patchbay in flow matrix style";
-    homepage = "https://github.com/OpenMusicKontrollers/patchmatrix";
-    license = licenses.artistic2;
-    maintainers = with maintainers; [ pennae ];
-    platforms = platforms.linux;
-  };
-}
diff --git a/pkgs/applications/audio/pt2-clone/default.nix b/pkgs/applications/audio/pt2-clone/default.nix
index 57c327d429d..2577f61e8d5 100644
--- a/pkgs/applications/audio/pt2-clone/default.nix
+++ b/pkgs/applications/audio/pt2-clone/default.nix
@@ -8,13 +8,13 @@
 
 stdenv.mkDerivation rec {
   pname = "pt2-clone";
-  version = "1.42";
+  version = "1.43";
 
   src = fetchFromGitHub {
     owner = "8bitbubsy";
     repo = "pt2-clone";
     rev = "v${version}";
-    sha256 = "sha256-CwnEvQsxrYStJ4RxnE0lHt1fBHQEZrjSldnQnTOPaE0=";
+    sha256 = "sha256-+sHGjgDqizv/9n0dDj8knsl+4MBfO3/pMkmD+MPsuNM=";
   };
 
   nativeBuildInputs = [ cmake ];
diff --git a/pkgs/applications/audio/pyradio/default.nix b/pkgs/applications/audio/pyradio/default.nix
index e95e5d9c6d1..ebddac8c539 100644
--- a/pkgs/applications/audio/pyradio/default.nix
+++ b/pkgs/applications/audio/pyradio/default.nix
@@ -2,7 +2,7 @@
 
 python3Packages.buildPythonApplication rec {
   pname = "pyradio";
-  version = "0.8.9.14";
+  version = "0.8.9.15";
 
   propagatedBuildInputs = with python3Packages; [
     requests
@@ -14,7 +14,7 @@ python3Packages.buildPythonApplication rec {
     owner = "coderholic";
     repo = pname;
     rev = version;
-    sha256 = "sha256-9q+YsQPFB7Ql5WnXvPj100cD7pGkmr1hHztqbpZStt8=";
+    sha256 = "sha256-r4T7t8Q46N59jqTkvdKBo6tffkrOYhoO/CZWvkBHOAQ=";
   };
 
   checkPhase = ''
diff --git a/pkgs/applications/audio/vocal/default.nix b/pkgs/applications/audio/vocal/default.nix
index 3d6f3aef9e1..0c80dda703e 100644
--- a/pkgs/applications/audio/vocal/default.nix
+++ b/pkgs/applications/audio/vocal/default.nix
@@ -56,7 +56,6 @@ stdenv.mkDerivation rec {
     libgee
     libnotify
     libunity
-    pantheon.elementary-icon-theme
     pantheon.granite
     sqlite
     webkitgtk
@@ -73,6 +72,13 @@ stdenv.mkDerivation rec {
     })
   ];
 
+  postPatch = ''
+    # Fix build with vala 0.56
+    # https://github.com/needle-and-thread/vocal/pull/503
+    substituteInPlace src/Vocal.vala \
+      --replace "public const OptionEntry[] app_options" "private const OptionEntry[] app_options"
+  '';
+
   passthru = {
     updateScript = nix-update-script {
       attrPath = pname;
diff --git a/pkgs/applications/blockchains/alfis/default.nix b/pkgs/applications/blockchains/alfis/default.nix
index 2c8526c6292..28ce7512129 100644
--- a/pkgs/applications/blockchains/alfis/default.nix
+++ b/pkgs/applications/blockchains/alfis/default.nix
@@ -14,16 +14,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "alfis";
-  version = "0.6.10";
+  version = "0.6.11";
 
   src = fetchFromGitHub {
     owner = "Revertron";
     repo = "Alfis";
     rev = "v${version}";
-    sha256 = "sha256-JJTU3wZ3cG5TmgHYShWJaNAZBA4z3qZXPfb7WUX6/80=";
+    sha256 = "sha256-vm/JBJh58UaSem18RpJuPUzM2GCy4RfCb6Hr1B7KWQA=";
   };
 
-  cargoSha256 = "sha256-BsFe1Fp+Q5Gqa1w4xov0tVLDKV7S+6b5fKBl09ggLB0=";
+  cargoSha256 = "sha256-8ijGO8up0qVQ/kVX5/DveKyovYLh7jm+d7vooS1waAA=";
 
   checkFlags = [
     # these want internet access, disable them
diff --git a/pkgs/applications/blockchains/chia/default.nix b/pkgs/applications/blockchains/chia/default.nix
index f912ef41677..e8527559c3a 100644
--- a/pkgs/applications/blockchains/chia/default.nix
+++ b/pkgs/applications/blockchains/chia/default.nix
@@ -6,14 +6,14 @@
 
 let chia = python3Packages.buildPythonApplication rec {
   pname = "chia";
-  version = "1.3.0";
+  version = "1.3.1";
 
   src = fetchFromGitHub {
     owner = "Chia-Network";
     repo = "chia-blockchain";
     rev = version;
     fetchSubmodules = true;
-    hash = "sha256-eUvZc/7gqGkCB2tNKdwqKOmOgEcG9a/7TSvvyQzhNcQ=";
+    hash = "sha256-nH6rCzIQu5oWsdEHa+UkvbWeUGjrtpEKVEcLmSoor5k=";
   };
 
   postPatch = ''
@@ -50,6 +50,7 @@ let chia = python3Packages.buildPythonApplication rec {
     dnslib
     dnspythonchia
     fasteners
+    filelock
     keyrings-cryptfile
     pyyaml
     setproctitle
diff --git a/pkgs/applications/blockchains/polkadot/default.nix b/pkgs/applications/blockchains/polkadot/default.nix
index 1210caaa251..e008832d4f0 100644
--- a/pkgs/applications/blockchains/polkadot/default.nix
+++ b/pkgs/applications/blockchains/polkadot/default.nix
@@ -8,13 +8,13 @@
 }:
 rustPlatform.buildRustPackage rec {
   pname = "polkadot";
-  version = "0.9.16";
+  version = "0.9.17";
 
   src = fetchFromGitHub {
     owner = "paritytech";
     repo = "polkadot";
     rev = "v${version}";
-    sha256 = "sha256-NXuYUmo80rrBZCcuISKon48SKyyJrkzCEhggxaJNfBM=";
+    sha256 = "sha256-m47Y4IXGc43XLs5d6ehlD0A53BWC5kO3K2BS/xbYgl8=";
 
     # see the comment below on fakeGit for how this is used
     leaveDotGit = true;
@@ -24,7 +24,7 @@ rustPlatform.buildRustPackage rec {
     '';
   };
 
-  cargoSha256 = "sha256-PIORMTzQbMdlrKwuF4MiGrLlg2nQpgLRsaHHeiCbqrg=";
+  cargoSha256 = "sha256-JBacioy2woAfKQuK6tXU9as4DNc+3uY3F3GWksCf6WU=";
 
   nativeBuildInputs =
     let
diff --git a/pkgs/applications/editors/cudatext/default.nix b/pkgs/applications/editors/cudatext/default.nix
index 2b58ba50bca..510873f200e 100644
--- a/pkgs/applications/editors/cudatext/default.nix
+++ b/pkgs/applications/editors/cudatext/default.nix
@@ -38,13 +38,13 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "cudatext";
-  version = "1.156.2";
+  version = "1.158.2";
 
   src = fetchFromGitHub {
     owner = "Alexey-T";
     repo = "CudaText";
     rev = version;
-    sha256 = "sha256-waVTNyK3OHpOvBJrXio+Xjn9q3WmUczbx3E26ChsuKo=";
+    sha256 = "sha256-YrRG+LaG39q/6Ry3cXo9XUwtvokkBl96XuQfE22QxZI=";
   };
 
   postPatch = ''
diff --git a/pkgs/applications/editors/cudatext/deps.json b/pkgs/applications/editors/cudatext/deps.json
index 9ae3c1c33ca..5e0f37c78b6 100644
--- a/pkgs/applications/editors/cudatext/deps.json
+++ b/pkgs/applications/editors/cudatext/deps.json
@@ -1,23 +1,23 @@
 {
   "EncConv": {
     "owner": "Alexey-T",
-    "rev": "2021.01.01",
-    "sha256": "18fp7yz2rl80a6xw7v4bgc4092l74fb6p6z4yf312r7gw7b8naq6"
+    "rev": "2022.03.02",
+    "sha256": "sha256-fBN6Ix5CqhzMbNrWGn6nw6+JRDmEfqe6o8JGHERkiPE="
   },
   "ATBinHex-Lazarus": {
     "owner": "Alexey-T",
-    "rev": "2021.07.02",
-    "sha256": "sha256-XSt2XfHMiF/ZIf07M65u+k5FjacyToL0rWbpcflKcuY="
+    "rev": "2021.11.17",
+    "sha256": "sha256-wdYH0sISFNx42zt07gLn9ANxcyrq3WrbRhWfTFgPQWw="
   },
   "ATFlatControls": {
     "owner": "Alexey-T",
-    "rev": "2021.11.11",
-    "sha256": "sha256-lbRRiA8CHWmosJefTHrP2cTgU8nlK1SmNcppG6Bl54I="
+    "rev": "2022.03.17",
+    "sha256": "sha256-zpirFZcqIT53tZhgxQGdwVB6pA98SQLr1o3f+Lhq2QY="
   },
   "ATSynEdit": {
     "owner": "Alexey-T",
-    "rev": "2022.02.19",
-    "sha256": "sha256-cq2dirFNPaWRmZJu0F+CFA//+SuFOOpTH3Q5zL4oPQo="
+    "rev": "2022.03.17",
+    "sha256": "sha256-aJZGHodydkqfe2BJLKWUzIX6vbdiGKs4z5ZqtteM6NU="
   },
   "ATSynEdit_Cmp": {
     "owner": "Alexey-T",
@@ -26,13 +26,13 @@
   },
   "EControl": {
     "owner": "Alexey-T",
-    "rev": "2022.02.02",
-    "sha256": "sha256-T/6SQJHKzbv/PlObDyc9bcpC14krHgcLDQn0v2fNkLM="
+    "rev": "2022.03.17",
+    "sha256": "sha256-sWRKRhUYf07TIrVWRqtpsYPZu0dPm0EhSIqoDLmkG0Y="
   },
   "ATSynEdit_Ex": {
     "owner": "Alexey-T",
-    "rev": "2022.02.01",
-    "sha256": "sha256-FAcq6ixmFPQFBAGG2gqB4T+YGYT+Rh/OlKdGcH/iL3g="
+    "rev": "2022.03.17",
+    "sha256": "sha256-FndLHJuCOyFr0IGUL4zFRjkEvTyNF3tHUO/Wx5IaV2Y="
   },
   "Python-for-Lazarus": {
     "owner": "Alexey-T",
diff --git a/pkgs/applications/emulators/vice/default.nix b/pkgs/applications/emulators/vice/default.nix
index 13457823a23..45fb03f1ac4 100644
--- a/pkgs/applications/emulators/vice/default.nix
+++ b/pkgs/applications/emulators/vice/default.nix
@@ -23,6 +23,108 @@
 , file
 }:
 
+let
+  desktopItems = [
+    (makeDesktopItem {
+      name = "x128";
+      exec = "x128";
+      comment = "VICE: C128 Emulator";
+      desktopName = "VICE: C128 Emulator";
+      genericName = "Commodore 128 emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "x64";
+      exec = "x64";
+      comment = "VICE: C64 Emulator";
+      desktopName = "VICE: C64 Emulator";
+      genericName = "Commodore 64 emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "x64dtv";
+      exec = "x64dtv";
+      comment = "VICE: C64 DTV Emulator";
+      desktopName = "VICE: C64 DTV Emulator";
+      genericName = "Commodore 64 DTV emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "x64sc";
+      exec = "x64sc";
+      comment = "VICE: C64 SC Emulator";
+      desktopName = "VICE: C64 SC Emulator";
+      genericName = "Commodore 64 SC emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "xcbm2";
+      exec = "xcbm2";
+      comment = "VICE: CBM-II B-Model Emulator";
+      desktopName = "VICE: CBM-II B-Model Emulator";
+      genericName = "CBM-II B-Model Emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "xcbm5x0";
+      exec = "xcbm5x0";
+      comment = "VICE: CBM-II P-Model Emulator";
+      desktopName = "VICE: CBM-II P-Model Emulator";
+      genericName = "CBM-II P-Model Emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "xpet";
+      exec = "xpet";
+      comment = "VICE: PET Emulator";
+      desktopName = "VICE: PET Emulator";
+      genericName = "Commodore PET Emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "xplus4";
+      exec = "xplus4";
+      comment = "VICE: PLUS4 Emulator";
+      desktopName = "VICE: PLUS4 Emulator";
+      genericName = "Commodore PLUS4 Emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "xscpu64";
+      exec = "xscpu64";
+      comment = "VICE: SCPU64 Emulator";
+      desktopName = "VICE: SCPU64 Emulator";
+      genericName = "Commodore SCPU64 Emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "xvic";
+      exec = "xvic";
+      comment = "VICE: VIC-20 Emulator";
+      desktopName = "VICE: VIC-20 Emulator";
+      genericName = "Commodore VIC-20 Emulator";
+      categories = [ "System" ];
+    })
+
+    (makeDesktopItem {
+      name = "vsid";
+      exec = "vsid";
+      comment = "VSID: The SID Emulator";
+      desktopName = "VSID: The SID Emulator";
+      genericName = "SID Emulator";
+      categories = [ "System" ];
+    })
+  ];
+in
 stdenv.mkDerivation rec {
   pname = "vice";
   version = "3.6.1";
@@ -59,15 +161,6 @@ stdenv.mkDerivation rec {
   dontDisableStatic = true;
   configureFlags = [ "--enable-fullscreen" "--enable-gnomeui" "--disable-pdf-docs" ];
 
-  desktopItem = makeDesktopItem {
-    name = "vice";
-    exec = "x64";
-    comment = "Commodore 64 emulator";
-    desktopName = "VICE";
-    genericName = "Commodore 64 emulator";
-    categories = [ "Emulator" ];
-  };
-
   preBuild = ''
     for i in src/resid src/resid-dtv
     do
@@ -77,12 +170,15 @@ stdenv.mkDerivation rec {
   '';
 
   postInstall = ''
-    mkdir -p $out/share/applications
-    cp ${desktopItem}/share/applications/* $out/share/applications
+    for app in ${toString desktopItems}
+    do
+        mkdir -p $out/share/applications
+        cp $app/share/applications/* $out/share/applications
+    done
   '';
 
   meta = {
-    description = "Commodore 64, 128 and other emulators";
+    description = "Emulators for a variety of 8-bit Commodore computers";
     homepage = "https://vice-emu.sourceforge.io/";
     license = lib.licenses.gpl2Plus;
     maintainers = [ lib.maintainers.sander ];
diff --git a/pkgs/applications/emulators/wine/sources.nix b/pkgs/applications/emulators/wine/sources.nix
index b98aceddbd4..23538a237e4 100644
--- a/pkgs/applications/emulators/wine/sources.nix
+++ b/pkgs/applications/emulators/wine/sources.nix
@@ -46,9 +46,9 @@ in rec {
 
   unstable = fetchurl rec {
     # NOTE: Don't forget to change the SHA256 for staging as well.
-    version = "7.2";
+    version = "7.4";
     url = "https://dl.winehq.org/wine/source/7.x/wine-${version}.tar.xz";
-    sha256 = "sha256-38ZBUjyNvGZBaLYXREFjPZcSdUVr9n3i3KqZyNql7hU=";
+    sha256 = "sha256-co6GbW5JzpKioMUUMz6f8Ivb9shvXvTmGAFDuNK31BY=";
     inherit (stable) gecko32 gecko64 patches;
 
     mono = fetchurl rec {
@@ -61,7 +61,7 @@ in rec {
   staging = fetchFromGitHub rec {
     # https://github.com/wine-staging/wine-staging/releases
     inherit (unstable) version;
-    sha256 = "sha256-Ec9rienlsDg+2QkJqPrGorDb5NycG1/iGWhnqLZOrwg=";
+    sha256 = "0vlj3b8bnidyhlgkjrnlbah3878zjy3s557vbp16qka42zjaa51q";
     owner = "wine-staging";
     repo = "wine-staging";
     rev = "v${version}";
diff --git a/pkgs/applications/emulators/wine/vkd3d.nix b/pkgs/applications/emulators/wine/vkd3d.nix
index ac7c399cd97..4f06b886e23 100644
--- a/pkgs/applications/emulators/wine/vkd3d.nix
+++ b/pkgs/applications/emulators/wine/vkd3d.nix
@@ -1,16 +1,18 @@
-{ lib, stdenv, fetchurl, moltenvk, vulkan-headers, spirv-headers, vulkan-loader }:
+{ lib, stdenv, fetchurl, moltenvk, vulkan-headers, spirv-headers, vulkan-loader, flex, bison }:
 
 #TODO: unstable
 
 stdenv.mkDerivation rec {
   pname = "vkd3d";
-  version = "1.2";
+  version = "1.3";
 
   src = fetchurl {
     url = "https://dl.winehq.org/vkd3d/source/vkd3d-${version}.tar.xz";
-    sha256 = "0szr1lw3xbgi9qjm13d1q4gyzzwv8i5wfxiwjg6dmwphrc7h6jxh";
+    sha256 = "134b347806d34a4d2b39ea29ff1c2b38443793803a3adc50800855bb929fb8b2";
   };
 
+  nativeBuildInputs = [ flex bison ];
+
   buildInputs = [ vulkan-headers spirv-headers ]
     ++ [ (if stdenv.isDarwin then moltenvk else vulkan-loader) ];
 
diff --git a/pkgs/applications/graphics/hydrus/default.nix b/pkgs/applications/graphics/hydrus/default.nix
index 12e605f80c4..fba158efacb 100644
--- a/pkgs/applications/graphics/hydrus/default.nix
+++ b/pkgs/applications/graphics/hydrus/default.nix
@@ -10,18 +10,19 @@
 
 python3Packages.buildPythonPackage rec {
   pname = "hydrus";
-  version = "474";
+  version = "477";
   format = "other";
 
   src = fetchFromGitHub {
     owner = "hydrusnetwork";
     repo = "hydrus";
     rev = "v${version}";
-    sha256 = "sha256-NeTHq8zlgBajw/eogwpabqeU0b7cp83Frqy6kisrths=";
+    sha256 = "sha256-/Gehlk+eMBPA+OT7xsTri6PDi2PBmzjckMVbqPGXT64=";
   };
 
   nativeBuildInputs = [
     wrapQtAppsHook
+    python3Packages.mkdocs-material
   ];
 
   propagatedBuildInputs = with python3Packages; [
@@ -85,6 +86,7 @@ python3Packages.buildPythonPackage rec {
     # Move the hydrus module and related directories
     mkdir -p $out/${python3Packages.python.sitePackages}
     mv {hydrus,static} $out/${python3Packages.python.sitePackages}
+    mkdocs build -d help
     mv help $out/doc/
 
     # install the hydrus binaries
diff --git a/pkgs/applications/kde/konsole.nix b/pkgs/applications/kde/konsole.nix
index 098001ef4c2..23cbdf28477 100644
--- a/pkgs/applications/kde/konsole.nix
+++ b/pkgs/applications/kde/konsole.nix
@@ -1,5 +1,5 @@
 {
-  mkDerivation, lib,
+  mkDerivation, lib, nixosTests,
   extra-cmake-modules, kdoctools,
   kbookmarks, kcompletion, kconfig, kconfigwidgets, kcoreaddons, kguiaddons,
   ki18n, kiconthemes, kinit, kio, knotifications,
@@ -22,5 +22,7 @@ mkDerivation {
     kservice ktextwidgets kwidgetsaddons kwindowsystem kxmlgui qtscript knewstuff
   ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.konsole;
+
   propagatedUserEnvPkgs = [ (lib.getBin kinit) ];
 }
diff --git a/pkgs/applications/misc/appeditor/default.nix b/pkgs/applications/misc/appeditor/default.nix
index cf1303422d2..9e38a208e17 100644
--- a/pkgs/applications/misc/appeditor/default.nix
+++ b/pkgs/applications/misc/appeditor/default.nix
@@ -43,6 +43,11 @@ stdenv.mkDerivation rec {
   ];
 
   postPatch = ''
+    # Fix build with vala 0.56
+    # https://github.com/donadigo/appeditor/pull/122
+    substituteInPlace src/Application.vala \
+      --replace "private static string? create_exec_filename;" "public static string? create_exec_filename;"
+
     chmod +x meson/post_install.py
     patchShebangs meson/post_install.py
   '';
diff --git a/pkgs/applications/misc/blender/default.nix b/pkgs/applications/misc/blender/default.nix
index 31c1d4a1993..a0467b81714 100644
--- a/pkgs/applications/misc/blender/default.nix
+++ b/pkgs/applications/misc/blender/default.nix
@@ -1,11 +1,11 @@
 { config, stdenv, lib, fetchurl, fetchzip, boost, cmake, ffmpeg, gettext, glew
 , ilmbase, libXi, libX11, libXext, libXrender
 , libjpeg, libpng, libsamplerate, libsndfile
-, libtiff, libGLU, libGL, openal, opencolorio, openexr, openimagedenoise, openimageio2, openjpeg, python39Packages
+, libtiff, libGLU, libGL, openal, opencolorio, openexr, openimagedenoise, openimageio2, openjpeg, python310Packages
 , openvdb, libXxf86vm, tbb, alembic
-, zlib, fftw, opensubdiv, freetype, jemalloc, ocl-icd, addOpenGLRunpath
+, zlib, zstd, fftw, opensubdiv, freetype, jemalloc, ocl-icd, addOpenGLRunpath
 , jackaudioSupport ? false, libjack2
-, cudaSupport ? config.cudaSupport or false, cudatoolkit
+, cudaSupport ? config.cudaSupport or false, cudatoolkit_11
 , colladaSupport ? true, opencollada
 , spaceNavSupport ? stdenv.isLinux, libspnav
 , makeWrapper
@@ -17,30 +17,31 @@
 
 with lib;
 let
-  python = python39Packages.python;
+  python = python310Packages.python;
   optix = fetchzip {
-    url = "https://developer.download.nvidia.com/redist/optix/v7.0/OptiX-7.0.0-include.zip";
-    sha256 = "1b3ccd3197anya2bj3psxdrvrpfgiwva5zfv2xmyrl73nb2dvfr7";
+    # URL taken from the Arch Linux blender PKGBUILD
+    url = "https://developer.download.nvidia.com/redist/optix/v7.3/OptiX-7.3.0-Include.zip";
+    sha256 = "0max1j4822mchj0xpz9lqzh91zkmvsn4py0r174cvqfz8z8ykjk8";
   };
 
 in
 stdenv.mkDerivation rec {
   pname = "blender";
-  version = "2.93.5";
+  version = "3.1.0";
 
   src = fetchurl {
     url = "https://download.blender.org/source/${pname}-${version}.tar.xz";
-    sha256 = "1fsw8w80h8k5w4zmy659bjlzqyn5i198hi1kbpzfrdn8psxg2bfj";
+    sha256 = "1d0476bzcz86lwdnyjn7hyzkmhfiqh47ls5h09jlbm7v7k9x69hw";
   };
 
   patches = lib.optional stdenv.isDarwin ./darwin.patch;
 
-  nativeBuildInputs = [ cmake makeWrapper python39Packages.wrapPython llvmPackages.llvm.dev ]
+  nativeBuildInputs = [ cmake makeWrapper python310Packages.wrapPython llvmPackages.llvm.dev ]
     ++ optionals cudaSupport [ addOpenGLRunpath ];
   buildInputs =
     [ boost ffmpeg gettext glew ilmbase
       freetype libjpeg libpng libsamplerate libsndfile libtiff
-      opencolorio openexr openimagedenoise openimageio2 openjpeg python zlib fftw jemalloc
+      opencolorio openexr openimagedenoise openimageio2 openjpeg python zlib zstd fftw jemalloc
       alembic
       (opensubdiv.override { inherit cudaSupport; })
       tbb
@@ -62,10 +63,10 @@ stdenv.mkDerivation rec {
       llvmPackages.openmp SDL Cocoa CoreGraphics ForceFeedback OpenAL OpenGL
     ])
     ++ optional jackaudioSupport libjack2
-    ++ optional cudaSupport cudatoolkit
+    ++ optional cudaSupport cudatoolkit_11
     ++ optional colladaSupport opencollada
     ++ optional spaceNavSupport libspnav;
-  pythonPath = with python39Packages; [ numpy requests ];
+  pythonPath = with python310Packages; [ numpy requests ];
 
   postPatch = ''
     # allow usage of dynamically linked embree
@@ -84,7 +85,7 @@ stdenv.mkDerivation rec {
         --replace '${"$"}{LIBDIR}/opencollada' \
                   '${opencollada}' \
         --replace '${"$"}{PYTHON_LIBPATH}/site-packages/numpy' \
-                  '${python39Packages.numpy}/${python.sitePackages}/numpy'
+                  '${python310Packages.numpy}/${python.sitePackages}/numpy'
     '' else ''
       substituteInPlace extern/clew/src/clew.c --replace '"libOpenCL.so"' '"${ocl-icd}/lib/libOpenCL.so"'
     '');
@@ -106,8 +107,8 @@ stdenv.mkDerivation rec {
       "-DPYTHON_VERSION=${python.pythonVersion}"
       "-DWITH_PYTHON_INSTALL=OFF"
       "-DWITH_PYTHON_INSTALL_NUMPY=OFF"
-      "-DPYTHON_NUMPY_PATH=${python39Packages.numpy}/${python.sitePackages}"
-      "-DPYTHON_NUMPY_INCLUDE_DIRS=${python39Packages.numpy}/${python.sitePackages}/numpy/core/include"
+      "-DPYTHON_NUMPY_PATH=${python310Packages.numpy}/${python.sitePackages}"
+      "-DPYTHON_NUMPY_INCLUDE_DIRS=${python310Packages.numpy}/${python.sitePackages}/numpy/core/include"
       "-DWITH_PYTHON_INSTALL_REQUESTS=OFF"
       "-DWITH_OPENVDB=ON"
       "-DWITH_TBB=ON"
diff --git a/pkgs/applications/misc/regextester/default.nix b/pkgs/applications/misc/regextester/default.nix
index b7eb2367528..2a00cb92b9d 100644
--- a/pkgs/applications/misc/regextester/default.nix
+++ b/pkgs/applications/misc/regextester/default.nix
@@ -37,7 +37,6 @@ stdenv.mkDerivation rec {
   ];
 
   buildInputs = [
-    pantheon.elementary-icon-theme
     pantheon.granite
     glib
     libgee
diff --git a/pkgs/applications/misc/tootle/default.nix b/pkgs/applications/misc/tootle/default.nix
index f29c471d04b..c11ad8d1e98 100644
--- a/pkgs/applications/misc/tootle/default.nix
+++ b/pkgs/applications/misc/tootle/default.nix
@@ -68,6 +68,11 @@ stdenv.mkDerivation rec {
   ];
 
   postPatch = ''
+    # Fix build with vala 0.56
+    # https://github.com/bleakgrey/tootle/pull/346
+    substituteInPlace src/Application.vala \
+      --replace "public const GLib.ActionEntry[] app_entries" "private const GLib.ActionEntry[] app_entries"
+
     chmod +x meson/post_install.py
     patchShebangs meson/post_install.py
   '';
diff --git a/pkgs/applications/networking/browsers/palemoon/default.nix b/pkgs/applications/networking/browsers/palemoon/default.nix
index e33ff3f3ca6..49dcfe0f9c4 100644
--- a/pkgs/applications/networking/browsers/palemoon/default.nix
+++ b/pkgs/applications/networking/browsers/palemoon/default.nix
@@ -6,7 +6,7 @@
 , dbus
 , dbus-glib
 , desktop-file-utils
-, fetchzip
+, fetchFromGitea
 , ffmpeg
 , fontconfig
 , freetype
@@ -44,12 +44,15 @@ assert with lib.strings; (
 
 stdenv.mkDerivation rec {
   pname = "palemoon";
-  version = "29.4.4";
-
-  src = fetchzip {
-    name = "${pname}-${version}";
-    url = "http://archive.palemoon.org/source/${pname}-${version}.source.tar.xz";
-    sha256 = "sha256-0R0IJd4rd7NqnxQxkHSx10cNlwECqpKgJnlfYAMx4wc=";
+  version = "30.0.0";
+
+  src = fetchFromGitea {
+    domain = "repo.palemoon.org";
+    owner = "MoonchildProductions";
+    repo = "Pale-Moon";
+    rev = "${version}_Release";
+    fetchSubmodules = true;
+    sha256 = "02qdw8b7hphphc66m3m14r4pmcfiq2c5z4jcscm2nymy18ycb10f";
   };
 
   nativeBuildInputs = [
@@ -137,24 +140,15 @@ stdenv.mkDerivation rec {
 
     ./mach install
 
-    # Fix missing icon due to wrong WMClass
-    # https://forum.palemoon.org/viewtopic.php?f=3&t=26746&p=214221#p214221
-    substituteInPlace ./palemoon/branding/official/palemoon.desktop \
-      --replace 'StartupWMClass="pale moon"' 'StartupWMClass=Pale moon'
+    # Install official branding assets (desktop file & icons)
     desktop-file-install --dir=$out/share/applications \
-      ./palemoon/branding/official/palemoon.desktop
-
-    # Install official branding icons
+      ./other-licenses/branding/palemoon/official/palemoon.desktop
     for iconname in default{16,22,24,32,48,256} mozicon128; do
       n=''${iconname//[^0-9]/}
       size=$n"x"$n
-      install -Dm644 ./palemoon/branding/official/$iconname.png $out/share/icons/hicolor/$size/apps/palemoon.png
+      install -Dm644 ./other-licenses/branding/palemoon/official/$iconname.png $out/share/icons/hicolor/$size/apps/palemoon.png
     done
 
-    # Remove unneeded SDK data from installation
-    # https://forum.palemoon.org/viewtopic.php?f=37&t=26796&p=214676#p214729
-    rm -rf $out/{include,share/idl,lib/palemoon-devel-${version}}
-
     runHook postInstall
   '';
 
diff --git a/pkgs/applications/networking/browsers/palemoon/mozconfig b/pkgs/applications/networking/browsers/palemoon/mozconfig
index 0eab96e5846..c7674d342c2 100644
--- a/pkgs/applications/networking/browsers/palemoon/mozconfig
+++ b/pkgs/applications/networking/browsers/palemoon/mozconfig
@@ -12,7 +12,7 @@ _BUILD_64=@build64@
 _GTK_VERSION=@gtkversion@
 
 # Standard build options for Pale Moon
-ac_add_options --enable-application=palemoon
+ac_add_options --enable-application=browser
 ac_add_options --enable-optimize="-O2 -w"
 ac_add_options --enable-default-toolkit=cairo-gtk$_GTK_VERSION
 ac_add_options --enable-jemalloc
@@ -20,8 +20,6 @@ ac_add_options --enable-strip
 ac_add_options --enable-devtools
 ac_add_options --enable-av1
 
-ac_add_options --disable-eme
-ac_add_options --disable-webrtc
 ac_add_options --disable-gamepad
 ac_add_options --disable-tests
 ac_add_options --disable-debug
diff --git a/pkgs/applications/networking/cawbird/default.nix b/pkgs/applications/networking/cawbird/default.nix
index 74074d23242..dad293e51a2 100644
--- a/pkgs/applications/networking/cawbird/default.nix
+++ b/pkgs/applications/networking/cawbird/default.nix
@@ -23,14 +23,14 @@
 }:
 
 stdenv.mkDerivation rec {
-  version = "1.4.2";
+  version = "1.5";
   pname = "cawbird";
 
   src = fetchFromGitHub {
     owner = "IBBoard";
     repo = "cawbird";
     rev = "v${version}";
-    sha256 = "17575cp5qcgsqf37y3xqg3vr6l2j8bbbkmy2c1l185rxghfacida";
+    sha256 = "sha256-XFN9gfCoQDmYYysg1yrUoPPE0Ow40LttvV5Ltu0DTfI=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/applications/networking/cluster/fn-cli/default.nix b/pkgs/applications/networking/cluster/fn-cli/default.nix
index df44c25cdc5..22e596dabf2 100644
--- a/pkgs/applications/networking/cluster/fn-cli/default.nix
+++ b/pkgs/applications/networking/cluster/fn-cli/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "fn";
-  version = "0.6.15";
+  version = "0.6.17";
 
   src = fetchFromGitHub {
     owner = "fnproject";
     repo = "cli";
     rev = version;
-    sha256 = "sha256-Gf4YxxyNALicb9GPpOW+Kzb2xrwcAU1XYOzXochH0LI=";
+    sha256 = "sha256-u/YISLlZFYlAUejSlaH7POA2WwKURPN8phFU86/caXU=";
   };
 
   vendorSha256 = null;
diff --git a/pkgs/applications/networking/cluster/werf/default.nix b/pkgs/applications/networking/cluster/werf/default.nix
index 5a52061b7c3..c55ccfa02fa 100644
--- a/pkgs/applications/networking/cluster/werf/default.nix
+++ b/pkgs/applications/networking/cluster/werf/default.nix
@@ -11,15 +11,15 @@
 
 buildGoModule rec {
   pname = "werf";
-  version = "1.2.76";
+  version = "1.2.77";
 
   src = fetchFromGitHub {
     owner = "werf";
     repo = "werf";
     rev = "v${version}";
-    sha256 = "sha256-OdMY7M9HCYtQ5v3yTjS1CJXDmg9bLA5LdcIxT7C3rcw=";
+    sha256 = "sha256-JmadwNERjexnJN+fBUjgMkvPtAaTbb7GITPsZlx2vik=";
   };
-  vendorSha256 = "sha256-q2blcmh1mHCfjDbeR3KQevjeDtBm0TwhhBIAvF55X00=";
+  vendorSha256 = "sha256-IPQiS1GgNP+k/INv3f3VitoHActC3MrRys905nTSXyI=";
   proxyVendor = true;
 
   nativeBuildInputs = [ pkg-config ];
diff --git a/pkgs/applications/networking/nextcloud-client/default.nix b/pkgs/applications/networking/nextcloud-client/default.nix
index 3a63f98c44c..deac64c903e 100644
--- a/pkgs/applications/networking/nextcloud-client/default.nix
+++ b/pkgs/applications/networking/nextcloud-client/default.nix
@@ -22,13 +22,13 @@
 
 mkDerivation rec {
   pname = "nextcloud-client";
-  version = "3.4.3";
+  version = "3.4.4";
 
   src = fetchFromGitHub {
     owner = "nextcloud";
     repo = "desktop";
     rev = "v${version}";
-    sha256 = "sha256-nryoueoqnbBAJaU11OUXKP5PNrYf4515ojBkdMFIEMA=";
+    sha256 = "sha256-e4me4mpK0N3UyM5MuJP3jxwM5h1dGBd+JzAr5f3BOGQ=";
   };
 
   patches = [
diff --git a/pkgs/applications/networking/sync/rclone/default.nix b/pkgs/applications/networking/sync/rclone/default.nix
index e23ba5cdf2a..9ebd9db67f9 100644
--- a/pkgs/applications/networking/sync/rclone/default.nix
+++ b/pkgs/applications/networking/sync/rclone/default.nix
@@ -40,8 +40,13 @@ buildGoModule rec {
         ${rcloneBin}/bin/rclone genautocomplete $shell rclone.$shell
         installShellCompletion rclone.$shell
       done
-    '' + lib.optionalString (enableCmount && !stdenv.isDarwin) ''
-      wrapProgram $out/bin/rclone --prefix LD_LIBRARY_PATH : "${fuse}/lib"
+    '' + lib.optionalString (enableCmount && !stdenv.isDarwin)
+      # use --suffix here to ensure we don't shadow /run/wrappers/bin/fusermount,
+      # as the setuid wrapper is required when running as non-root on NixOS.
+      ''
+      wrapProgram $out/bin/rclone \
+                  --suffix PATH : "${lib.makeBinPath [ fuse ] }" \
+                  --prefix LD_LIBRARY_PATH : "${fuse}/lib"
     '';
 
   meta = with lib; {
diff --git a/pkgs/applications/office/bookworm/default.nix b/pkgs/applications/office/bookworm/default.nix
index 8bd1d8f974b..100818ad249 100644
--- a/pkgs/applications/office/bookworm/default.nix
+++ b/pkgs/applications/office/bookworm/default.nix
@@ -26,7 +26,6 @@ stdenv.mkDerivation rec {
   ];
 
   buildInputs = [
-    pantheon.elementary-icon-theme
     pantheon.granite
     glib
     libgee
diff --git a/pkgs/applications/office/spice-up/default.nix b/pkgs/applications/office/spice-up/default.nix
index 1de5a8b817b..9c716dfd9a4 100644
--- a/pkgs/applications/office/spice-up/default.nix
+++ b/pkgs/applications/office/spice-up/default.nix
@@ -46,7 +46,6 @@ stdenv.mkDerivation rec {
     libgee
     libgudev
     libsoup
-    pantheon.elementary-icon-theme
     pantheon.granite
   ];
 
diff --git a/pkgs/applications/office/zotero/default.nix b/pkgs/applications/office/zotero/default.nix
index 6fb8aecbbca..53c61f0d993 100644
--- a/pkgs/applications/office/zotero/default.nix
+++ b/pkgs/applications/office/zotero/default.nix
@@ -1,4 +1,8 @@
-{ lib, stdenv, fetchurl, wrapGAppsHook, makeDesktopItem
+{ lib
+, stdenv
+, fetchurl
+, wrapGAppsHook
+, makeDesktopItem
 , atk
 , cairo
 , coreutils
@@ -27,7 +31,8 @@
 , libXt
 , libnotify
 , gnome
-, libGLU, libGL
+, libGLU
+, libGL
 , nspr
 , nss
 , pango
@@ -36,55 +41,56 @@
 
 stdenv.mkDerivation rec {
   pname = "zotero";
-  version = "5.0.96.3";
+  version = "6.0";
 
   src = fetchurl {
-    url = "https://download.zotero.org/client/release/${version}/Zotero-${version}_linux-x86_64.tar.bz2";
-    sha256 = "sha256-eqSNzmkGNopGJ7VByvUffFEPJz3WHS7b5+jgUAW/hU4=";
+    url =
+      "https://download.zotero.org/client/release/${version}/Zotero-${version}_linux-x86_64.tar.bz2";
+    sha256 = "0zkgmmflcj6vbyv8rs51jf3vx1zq8pl7b5d5asgayhrdlwi0qgff";
   };
 
   nativeBuildInputs = [ wrapGAppsHook ];
-  buildInputs= [ gsettings-desktop-schemas glib gtk3 gnome.adwaita-icon-theme dconf ];
+  buildInputs =
+    [ gsettings-desktop-schemas glib gtk3 gnome.adwaita-icon-theme dconf ];
 
   dontConfigure = true;
   dontBuild = true;
   dontStrip = true;
   dontPatchELF = true;
 
-  libPath = lib.makeLibraryPath
-    [ stdenv.cc.cc
-      atk
-      cairo
-      curl
-      cups
-      dbus-glib
-      dbus
-      fontconfig
-      freetype
-      gdk-pixbuf
-      glib
-      glibc
-      gtk3
-      libX11
-      libXScrnSaver
-      libXcomposite
-      libXcursor
-      libxcb
-      libXdamage
-      libXext
-      libXfixes
-      libXi
-      libXinerama
-      libXrender
-      libXt
-      libnotify
-      libGLU libGL
-      nspr
-      nss
-      pango
-    ] + ":" + lib.makeSearchPathOutput "lib" "lib64" [
-      stdenv.cc.cc
-    ];
+  libPath = lib.makeLibraryPath [
+    stdenv.cc.cc
+    atk
+    cairo
+    curl
+    cups
+    dbus-glib
+    dbus
+    fontconfig
+    freetype
+    gdk-pixbuf
+    glib
+    glibc
+    gtk3
+    libX11
+    libXScrnSaver
+    libXcomposite
+    libXcursor
+    libxcb
+    libXdamage
+    libXext
+    libXfixes
+    libXi
+    libXinerama
+    libXrender
+    libXt
+    libnotify
+    libGLU
+    libGL
+    nspr
+    nss
+    pango
+  ] + ":" + lib.makeSearchPathOutput "lib" "lib64" [ stdenv.cc.cc ];
 
   postPatch = ''
     sed -i '/pref("app.update.enabled", true);/c\pref("app.update.enabled", false);' defaults/preferences/prefs.js
diff --git a/pkgs/applications/science/logic/isabelle/default.nix b/pkgs/applications/science/logic/isabelle/default.nix
index aaac288b615..1c613419cc8 100644
--- a/pkgs/applications/science/logic/isabelle/default.nix
+++ b/pkgs/applications/science/logic/isabelle/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchurl, coreutils, nettools, java, scala, polyml, z3, veriT, vampire, eprover-ho, rlwrap, perl, makeDesktopItem }:
+{ lib, stdenv, fetchurl, coreutils, nettools, java, scala, polyml, z3, veriT, vampire, eprover-ho, naproche, rlwrap, perl, makeDesktopItem }:
 # nettools needed for hostname
 
 stdenv.mkDerivation rec {
@@ -66,7 +66,8 @@ stdenv.mkDerivation rec {
       ISABELLE_JDK_HOME=${java}
     EOF
 
-    sed -i -e 's/naproche_server : bool = true/naproche_server : bool = false/' contrib/naproche-*/etc/options
+    rm contrib/naproche-*/x86*/Naproche-SAD
+    ln -s ${naproche}/bin/Naproche-SAD contrib/naproche-*/x86*/
 
     echo ISABELLE_LINE_EDITOR=${rlwrap}/bin/rlwrap >>etc/settings
 
diff --git a/pkgs/applications/science/logic/naproche/default.nix b/pkgs/applications/science/logic/naproche/default.nix
new file mode 100644
index 00000000000..5e286a9b57e
--- /dev/null
+++ b/pkgs/applications/science/logic/naproche/default.nix
@@ -0,0 +1,38 @@
+{ lib, fetchFromGitHub, haskellPackages, makeWrapper, eprover }:
+
+with haskellPackages; mkDerivation {
+  pname = "Naproche-SAD";
+  version = "0.1.0.0";
+
+  src = fetchFromGitHub {
+    owner = "naproche";
+    repo = "naproche";
+    rev = "d39cea85ace04d5b3775fde9972a33886799bfe6";
+    sha256 = "1zqrldmxkzbyg9bssrbwb00zic29904igcipaz1m9al0456yjnkf";
+  };
+
+  isExecutable = true;
+
+  buildTools = [ hpack makeWrapper ];
+  executableHaskellDepends = [
+    base array bytestring containers ghc-prim megaparsec mtl network process
+    split temporary text threads time transformers uuid
+  ];
+
+  prePatch = "hpack";
+
+  checkPhase = ''
+    export NAPROCHE_EPROVER=${eprover}/bin/eprover
+    dist/build/Naproche-SAD/Naproche-SAD examples/cantor.ftl.tex -t 60 --tex=on
+  '';
+
+  postInstall = ''
+    wrapProgram $out/bin/Naproche-SAD \
+      --set-default NAPROCHE_EPROVER ${eprover}/bin/eprover
+  '';
+
+  homepage = "https://github.com/naproche/naproche#readme";
+  description = "Write formal proofs in natural language and LaTeX";
+  maintainers = with lib.maintainers; [ jvanbruegge ];
+  license = lib.licenses.gpl3Only;
+}
diff --git a/pkgs/applications/science/math/nasc/default.nix b/pkgs/applications/science/math/nasc/default.nix
index 2fe027365cd..3e7a8b56af1 100644
--- a/pkgs/applications/science/math/nasc/default.nix
+++ b/pkgs/applications/science/math/nasc/default.nix
@@ -48,7 +48,6 @@ stdenv.mkDerivation rec {
     gtk3
     gtksourceview
     libgee
-    pantheon.elementary-icon-theme
     pantheon.granite
     webkitgtk
     # We add libqalculate's runtime dependencies because nasc has it as a modified subproject.
diff --git a/pkgs/applications/science/math/qalculate-gtk/default.nix b/pkgs/applications/science/math/qalculate-gtk/default.nix
index 7ba5838faba..4f68eb6ae78 100644
--- a/pkgs/applications/science/math/qalculate-gtk/default.nix
+++ b/pkgs/applications/science/math/qalculate-gtk/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "qalculate-gtk";
-  version = "4.0.0";
+  version = "4.1.0";
 
   src = fetchFromGitHub {
     owner = "qalculate";
     repo = "qalculate-gtk";
     rev = "v${version}";
-    sha256 = "sha256-l9lR5MVHWiRz5RG/I/nXRY4GQSSaXXP7PlRNoAu9+yo=";
+    sha256 = "sha256-EOiExp8JBc3SybSiBVbuRxBqTujzLjysWM0v94goups=";
   };
 
   hardeningDisable = [ "format" ];
diff --git a/pkgs/applications/terminal-emulators/alacritty/default.nix b/pkgs/applications/terminal-emulators/alacritty/default.nix
index e7730ba4b94..e6762f543cb 100644
--- a/pkgs/applications/terminal-emulators/alacritty/default.nix
+++ b/pkgs/applications/terminal-emulators/alacritty/default.nix
@@ -3,6 +3,7 @@
 , fetchFromGitHub
 , fetchpatch
 , rustPlatform
+, nixosTests
 
 , cmake
 , gzip
@@ -132,6 +133,8 @@ rustPlatform.buildRustPackage rec {
 
   dontPatchELF = true;
 
+  passthru.tests.test = nixosTests.terminal-emulators.alacritty;
+
   meta = with lib; {
     description = "A cross-platform, GPU-accelerated terminal emulator";
     homepage = "https://github.com/alacritty/alacritty";
diff --git a/pkgs/applications/terminal-emulators/contour/default.nix b/pkgs/applications/terminal-emulators/contour/default.nix
index 6ef16a5b9e4..d0def97e46f 100644
--- a/pkgs/applications/terminal-emulators/contour/default.nix
+++ b/pkgs/applications/terminal-emulators/contour/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, mkDerivation, fetchFromGitHub, cmake, pkg-config, freetype, libGL, pcre }:
+{ lib, stdenv, mkDerivation, fetchFromGitHub, cmake, pkg-config, freetype, libGL, pcre, nixosTests }:
 
 mkDerivation rec {
   pname = "contour";
@@ -16,6 +16,8 @@ mkDerivation rec {
 
   buildInputs = [ freetype libGL pcre ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.contour;
+
   meta = with lib; {
     description = "Modern C++ Terminal Emulator";
     homepage = "https://github.com/christianparpart/contour";
diff --git a/pkgs/applications/terminal-emulators/cool-retro-term/default.nix b/pkgs/applications/terminal-emulators/cool-retro-term/default.nix
index 049452c7e3a..4243502e7fe 100644
--- a/pkgs/applications/terminal-emulators/cool-retro-term/default.nix
+++ b/pkgs/applications/terminal-emulators/cool-retro-term/default.nix
@@ -1,5 +1,5 @@
 { lib, stdenv, fetchFromGitHub, mkDerivation, qtbase, qtquick1, qmltermwidget
-, qtquickcontrols, qtgraphicaleffects, qmake }:
+, qtquickcontrols, qtgraphicaleffects, qmake, nixosTests }:
 
 mkDerivation rec {
   version = "1.1.1";
@@ -29,6 +29,8 @@ mkDerivation rec {
     ln -s $out/bin/cool-retro-term.app/Contents/MacOS/cool-retro-term $out/bin/cool-retro-term
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.cool-retro-term;
+
   meta = {
     description = "Terminal emulator which mimics the old cathode display";
     longDescription = ''
diff --git a/pkgs/applications/terminal-emulators/ctx/default.nix b/pkgs/applications/terminal-emulators/ctx/default.nix
index 78c673d035a..420e9b020e9 100644
--- a/pkgs/applications/terminal-emulators/ctx/default.nix
+++ b/pkgs/applications/terminal-emulators/ctx/default.nix
@@ -8,6 +8,7 @@
 , libdrm # Not documented
 , pkg-config
 , enableFb ? false
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -43,6 +44,8 @@ stdenv.mkDerivation rec {
     "PREFIX=${placeholder "out"}"
   ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.ctx;
+
   meta = with lib; {
     homepage = "https://ctx.graphics/";
     description = "Vector graphics terminal";
diff --git a/pkgs/applications/terminal-emulators/darktile/default.nix b/pkgs/applications/terminal-emulators/darktile/default.nix
index f6323294950..17af8457cb1 100644
--- a/pkgs/applications/terminal-emulators/darktile/default.nix
+++ b/pkgs/applications/terminal-emulators/darktile/default.nix
@@ -12,6 +12,7 @@
 , libXext
 , libXxf86vm
 , libGL
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -57,6 +58,8 @@ stdenv.mkDerivation rec {
     runHook postInstall
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.darktile;
+
   meta = with lib; {
     description = "A GPU rendered terminal emulator designed for tiling window managers";
     homepage = "https://github.com/liamg/darktile";
diff --git a/pkgs/applications/terminal-emulators/eterm/default.nix b/pkgs/applications/terminal-emulators/eterm/default.nix
index 328e61f1fe5..769104fea43 100644
--- a/pkgs/applications/terminal-emulators/eterm/default.nix
+++ b/pkgs/applications/terminal-emulators/eterm/default.nix
@@ -8,6 +8,7 @@
 , libXext
 , libast
 , pkg-config
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -33,6 +34,8 @@ stdenv.mkDerivation rec {
     libast
   ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.eterm;
+
   meta = with lib; {
     homepage = "https://github.com/mej/Eterm"; # http://www.eterm.org is gone
     description = "Terminal emulator";
diff --git a/pkgs/applications/terminal-emulators/germinal/default.nix b/pkgs/applications/terminal-emulators/germinal/default.nix
index 1bbe87853f7..aef24f675b1 100644
--- a/pkgs/applications/terminal-emulators/germinal/default.nix
+++ b/pkgs/applications/terminal-emulators/germinal/default.nix
@@ -9,6 +9,7 @@
 , tmux
 , vte
 , wrapGAppsHook
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -45,6 +46,8 @@ stdenv.mkDerivation rec {
     runHook postFixup
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.germinal;
+
   meta = with lib; {
     description = "A minimal terminal emulator";
     homepage = "https://github.com/Keruspe/Germinal";
diff --git a/pkgs/applications/terminal-emulators/guake/default.nix b/pkgs/applications/terminal-emulators/guake/default.nix
index 8c4c6b383d5..cc2dc5c55d6 100644
--- a/pkgs/applications/terminal-emulators/guake/default.nix
+++ b/pkgs/applications/terminal-emulators/guake/default.nix
@@ -10,6 +10,7 @@
 , libutempter
 , vte
 , libwnck
+, nixosTests
 }:
 
 python3.pkgs.buildPythonApplication rec {
@@ -66,6 +67,8 @@ python3.pkgs.buildPythonApplication rec {
     gappsWrapperArgs+=(--prefix LD_LIBRARY_PATH : "${lib.makeLibraryPath [ libutempter ]}")
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.guake;
+
   meta = with lib; {
     description = "Drop-down terminal for GNOME";
     homepage = "http://guake-project.org";
diff --git a/pkgs/applications/terminal-emulators/hyper/default.nix b/pkgs/applications/terminal-emulators/hyper/default.nix
index a2b3d13b36a..485c1dead14 100644
--- a/pkgs/applications/terminal-emulators/hyper/default.nix
+++ b/pkgs/applications/terminal-emulators/hyper/default.nix
@@ -2,7 +2,7 @@
 , freetype, fontconfig, dbus, libXi, libXcursor, libXdamage, libXrandr, libXcomposite
 , libXext, libXfixes, libXrender, libX11, libXtst, libXScrnSaver, libxcb, nss, nspr
 , alsa-lib, cups, expat, udev, libpulseaudio, at-spi2-atk, at-spi2-core, libxshmfence
-, libdrm, libxkbcommon, mesa }:
+, libdrm, libxkbcommon, mesa, nixosTests }:
 
 let
   libPath = lib.makeLibraryPath [
@@ -43,6 +43,8 @@ stdenv.mkDerivation rec {
       --replace "/opt/Hyper/hyper" "hyper"
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.hyper;
+
   dontPatchELF = true;
   meta = with lib; {
     description = "A terminal built on web technologies";
diff --git a/pkgs/applications/terminal-emulators/kermit-terminal/default.nix b/pkgs/applications/terminal-emulators/kermit-terminal/default.nix
index 8a5a7856414..da5342aa683 100644
--- a/pkgs/applications/terminal-emulators/kermit-terminal/default.nix
+++ b/pkgs/applications/terminal-emulators/kermit-terminal/default.nix
@@ -6,6 +6,7 @@
 , pcre
 , pkg-config
 , vte
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -31,6 +32,8 @@ stdenv.mkDerivation rec {
     vte
   ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.kermit;
+
   meta = with lib; {
     homepage = "https://github.com/orhun/kermit";
     description = "A VTE-based, simple and froggy terminal emulator";
diff --git a/pkgs/applications/terminal-emulators/kgx/default.nix b/pkgs/applications/terminal-emulators/kgx/default.nix
index dc5c651d50a..1be9412e8da 100644
--- a/pkgs/applications/terminal-emulators/kgx/default.nix
+++ b/pkgs/applications/terminal-emulators/kgx/default.nix
@@ -18,6 +18,7 @@
 , python3
 , sassc
 , wrapGAppsHook
+, nixosTests
 }:
 
 stdenv.mkDerivation {
@@ -66,6 +67,8 @@ stdenv.mkDerivation {
       --replace "Exec=kgx" "Exec=$out/bin/kgx"
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.kgx;
+
   meta = with lib; {
     description = "Simple user-friendly terminal emulator for the GNOME desktop";
     homepage = "https://gitlab.gnome.org/ZanderBrown/kgx";
diff --git a/pkgs/applications/terminal-emulators/kitty/default.nix b/pkgs/applications/terminal-emulators/kitty/default.nix
index 1e6db04dd76..d06c6d937c1 100644
--- a/pkgs/applications/terminal-emulators/kitty/default.nix
+++ b/pkgs/applications/terminal-emulators/kitty/default.nix
@@ -22,6 +22,7 @@
 , zsh
 , fish
 , fetchpatch
+, nixosTests
 }:
 
 with python3Packages;
@@ -176,6 +177,8 @@ buildPythonApplication rec {
     runHook postInstall
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.kitty;
+
   meta = with lib; {
     homepage = "https://github.com/kovidgoyal/kitty";
     description = "A modern, hackable, featureful, OpenGL based terminal emulator";
diff --git a/pkgs/applications/terminal-emulators/lxterminal/default.nix b/pkgs/applications/terminal-emulators/lxterminal/default.nix
index 25495a68683..13f4e3c4119 100644
--- a/pkgs/applications/terminal-emulators/lxterminal/default.nix
+++ b/pkgs/applications/terminal-emulators/lxterminal/default.nix
@@ -1,5 +1,5 @@
 { lib, stdenv, fetchFromGitHub, automake, autoconf, intltool, pkg-config, gtk3, vte, wrapGAppsHook
-, libxslt, docbook_xml_dtd_412, docbook_xsl, libxml2, findXMLCatalogs
+, libxslt, docbook_xml_dtd_412, docbook_xsl, libxml2, findXMLCatalogs, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -35,6 +35,8 @@ stdenv.mkDerivation rec {
 
   doCheck = true;
 
+  passthru.tests.test = nixosTests.terminal-emulators.lxterminal;
+
   meta = {
     description = "The standard terminal emulator of LXDE";
     longDescription = ''
diff --git a/pkgs/applications/terminal-emulators/mlterm/default.nix b/pkgs/applications/terminal-emulators/mlterm/default.nix
index d84aa984a33..056a2413844 100644
--- a/pkgs/applications/terminal-emulators/mlterm/default.nix
+++ b/pkgs/applications/terminal-emulators/mlterm/default.nix
@@ -1,4 +1,4 @@
-{ stdenv, lib, fetchFromGitHub, pkg-config, autoconf, makeDesktopItem
+{ stdenv, lib, fetchFromGitHub, pkg-config, autoconf, makeDesktopItem, nixosTests
 , libX11, gdk-pixbuf, cairo, libXft, gtk3, vte
 , harfbuzz #substituting glyphs with opentype fonts
 , fribidi, m17n_lib #bidi and encoding
@@ -110,6 +110,8 @@ stdenv.mkDerivation rec {
     startupNotify = false;
   };
 
+  passthru.tests.test = nixosTests.terminal-emulators.mlterm;
+
   meta = with lib; {
     description = "Multi Lingual TERMinal emulator";
     homepage = "http://mlterm.sourceforge.net/";
diff --git a/pkgs/applications/terminal-emulators/mrxvt/default.nix b/pkgs/applications/terminal-emulators/mrxvt/default.nix
index ba6c6ab87c1..006616bd944 100644
--- a/pkgs/applications/terminal-emulators/mrxvt/default.nix
+++ b/pkgs/applications/terminal-emulators/mrxvt/default.nix
@@ -10,6 +10,7 @@
 , freetype
 , pkg-config
 , which
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -38,6 +39,8 @@ stdenv.mkDerivation rec {
     NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -I${freetype.dev}/include/freetype2";
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.mrxvt;
+
   meta = with lib; {
     description = "Lightweight multitabbed feature-rich X11 terminal emulator";
     longDescription = "
diff --git a/pkgs/applications/terminal-emulators/roxterm/default.nix b/pkgs/applications/terminal-emulators/roxterm/default.nix
index 3e4454726b6..7444a8627b7 100644
--- a/pkgs/applications/terminal-emulators/roxterm/default.nix
+++ b/pkgs/applications/terminal-emulators/roxterm/default.nix
@@ -1,7 +1,7 @@
 { at-spi2-core, cmake, dbus, dbus-glib, docbook_xsl, libepoxy, fetchFromGitHub
 , glib, gtk3, harfbuzz, libXdmcp, libXtst, libpthreadstubs
 , libselinux, libsepol, libtasn1, libxkbcommon, libxslt, p11-kit, pcre2
-, pkg-config, lib, stdenv, util-linuxMinimal, vte, wrapGAppsHook, xmlto
+, pkg-config, lib, stdenv, util-linuxMinimal, vte, wrapGAppsHook, xmlto, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -23,6 +23,8 @@ stdenv.mkDerivation rec {
       libsepol libxkbcommon libepoxy at-spi2-core libXtst libtasn1 p11-kit
     ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.roxterm;
+
   meta = with lib; {
     homepage = "https://github.com/realh/roxterm";
     license = licenses.gpl3;
diff --git a/pkgs/applications/terminal-emulators/rxvt-unicode/default.nix b/pkgs/applications/terminal-emulators/rxvt-unicode/default.nix
index 1ca84149fdd..0eccceb5a4f 100644
--- a/pkgs/applications/terminal-emulators/rxvt-unicode/default.nix
+++ b/pkgs/applications/terminal-emulators/rxvt-unicode/default.nix
@@ -7,6 +7,7 @@
 , gdkPixbufSupport ? true
 , unicode3Support  ? true
 , emojiSupport     ? false
+, nixosTests
 }:
 
 let
@@ -102,6 +103,8 @@ stdenv.mkDerivation {
     cp -r ${desktopItem}/share/applications/ $out/share/
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.urxvt;
+
   meta = {
     inherit description;
     homepage = "http://software.schmorp.de/pkg/rxvt-unicode.html";
diff --git a/pkgs/applications/terminal-emulators/rxvt-unicode/wrapper.nix b/pkgs/applications/terminal-emulators/rxvt-unicode/wrapper.nix
index 5255d40b9ce..9fe78e30ae3 100644
--- a/pkgs/applications/terminal-emulators/rxvt-unicode/wrapper.nix
+++ b/pkgs/applications/terminal-emulators/rxvt-unicode/wrapper.nix
@@ -5,6 +5,7 @@
 , rxvt-unicode-unwrapped
 , rxvt-unicode-plugins
 , perlPackages
+, nixosTests
 , configure ? { availablePlugins, ... }:
   { plugins = builtins.attrValues availablePlugins;
     extraDeps = [ ];
@@ -51,7 +52,10 @@ let
             --suffix-each URXVT_PERL_LIB ':' "$out/lib/urxvt/perl"
         '';
 
-        passthru.plugins = plugins;
+        passthru = {
+          plugins = plugins;
+          tests.test = nixosTests.terminal-emulators.urxvt;
+        };
       };
 
 in
diff --git a/pkgs/applications/terminal-emulators/sakura/default.nix b/pkgs/applications/terminal-emulators/sakura/default.nix
index 8434c233ad4..52b35ad38ae 100644
--- a/pkgs/applications/terminal-emulators/sakura/default.nix
+++ b/pkgs/applications/terminal-emulators/sakura/default.nix
@@ -9,6 +9,7 @@
 , perl
 , pkg-config
 , vte
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -43,6 +44,8 @@ stdenv.mkDerivation rec {
       --suffix XDG_DATA_DIRS : ${gtk3}/share/gsettings-schemas/${gtk3.name}/
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.sakura;
+
   meta = with lib; {
     homepage = "https://www.pleyades.net/david/projects/sakura";
     description = "A terminal emulator based on GTK and VTE";
diff --git a/pkgs/applications/terminal-emulators/st/default.nix b/pkgs/applications/terminal-emulators/st/default.nix
index 35baec9d0e4..efbdc040589 100644
--- a/pkgs/applications/terminal-emulators/st/default.nix
+++ b/pkgs/applications/terminal-emulators/st/default.nix
@@ -11,6 +11,7 @@
 , conf ? null
 , patches ? [ ]
 , extraLibs ? [ ]
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -55,6 +56,8 @@ stdenv.mkDerivation rec {
 
   installFlags = [ "PREFIX=$(out)" ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.st;
+
   meta = with lib; {
     homepage = "https://st.suckless.org/";
     description = "Simple Terminal for X from Suckless.org Community";
diff --git a/pkgs/applications/terminal-emulators/stupidterm/default.nix b/pkgs/applications/terminal-emulators/stupidterm/default.nix
index 554d210fbde..279af55254e 100644
--- a/pkgs/applications/terminal-emulators/stupidterm/default.nix
+++ b/pkgs/applications/terminal-emulators/stupidterm/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchFromGitHub, pkg-config, vte, gtk, pcre2 }:
+{ lib, stdenv, fetchFromGitHub, pkg-config, vte, gtk, pcre2, nixosTests }:
 
 stdenv.mkDerivation {
   pname = "stupidterm";
@@ -26,6 +26,8 @@ stdenv.mkDerivation {
       --replace "Exec=st" "Exec=$out/bin/stupidterm"
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.stupidterm;
+
   meta = with lib; {
     description = "Simple wrapper around the VTE terminal emulator widget for GTK";
     homepage = "https://github.com/esmil/stupidterm";
diff --git a/pkgs/applications/terminal-emulators/terminator/default.nix b/pkgs/applications/terminal-emulators/terminator/default.nix
index 67c7196e24d..04d4af0ccaf 100644
--- a/pkgs/applications/terminal-emulators/terminator/default.nix
+++ b/pkgs/applications/terminal-emulators/terminator/default.nix
@@ -9,6 +9,7 @@
 , libnotify
 , wrapGAppsHook
 , vte
+, nixosTests
 }:
 
 python3.pkgs.buildPythonApplication rec {
@@ -62,6 +63,8 @@ python3.pkgs.buildPythonApplication rec {
     makeWrapperArgs+=("''${gappsWrapperArgs[@]}")
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.terminator;
+
   meta = with lib; {
     description = "Terminal emulator with support for tiling and tabs";
     longDescription = ''
diff --git a/pkgs/applications/terminal-emulators/termite/default.nix b/pkgs/applications/terminal-emulators/termite/default.nix
index 96fd3409dc7..2b05ecdc722 100644
--- a/pkgs/applications/terminal-emulators/termite/default.nix
+++ b/pkgs/applications/terminal-emulators/termite/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchFromGitHub, fetchpatch, pkg-config, vte, gtk3, ncurses, pcre2, wrapGAppsHook }:
+{ lib, stdenv, fetchFromGitHub, fetchpatch, pkg-config, vte, gtk3, ncurses, pcre2, wrapGAppsHook, nixosTests }:
 
 let
 
@@ -57,7 +57,10 @@ in stdenv.mkDerivation rec {
 
   outputs = [ "out" "terminfo" ];
 
-  passthru = { inherit vte-ng; };
+  passthru = {
+    inherit vte-ng;
+    tests = nixosTests.terminal-emulators.termite;
+  };
 
   postInstall = ''
     mkdir -p $terminfo/share
diff --git a/pkgs/applications/terminal-emulators/termonad/default.nix b/pkgs/applications/terminal-emulators/termonad/default.nix
index 6a1dd0bec39..111d790c695 100644
--- a/pkgs/applications/terminal-emulators/termonad/default.nix
+++ b/pkgs/applications/terminal-emulators/termonad/default.nix
@@ -1,4 +1,4 @@
-{ stdenv, haskellPackages, makeWrapper, packages ? (pkgSet: []) }:
+{ stdenv, haskellPackages, makeWrapper, packages ? (pkgSet: []), nixosTests }:
 
 let
   termonadEnv = haskellPackages.ghcWithPackages (self: [ self.termonad ] ++ packages self);
@@ -17,6 +17,8 @@ in stdenv.mkDerivation {
   preferLocalBuild = true;
   allowSubstitutes = false;
 
+  passthru.tests.test = nixosTests.terminal-emulators.termonad;
+
   meta = haskellPackages.termonad.meta // {
     mainProgram = "termonad";
   };
diff --git a/pkgs/applications/terminal-emulators/tilda/default.nix b/pkgs/applications/terminal-emulators/tilda/default.nix
index 1ea50cfff7a..01cc4f527a9 100644
--- a/pkgs/applications/terminal-emulators/tilda/default.nix
+++ b/pkgs/applications/terminal-emulators/tilda/default.nix
@@ -9,6 +9,7 @@
 , pcre2
 , vte
 , makeWrapper
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -42,6 +43,8 @@ stdenv.mkDerivation rec {
         --prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH"
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.tilda;
+
   meta = with lib; {
     description = "A Gtk based drop down terminal for Linux and Unix";
     homepage = "https://github.com/lanoxx/tilda/";
diff --git a/pkgs/applications/terminal-emulators/tilix/default.nix b/pkgs/applications/terminal-emulators/tilix/default.nix
index 0538a2060ae..917b179490c 100644
--- a/pkgs/applications/terminal-emulators/tilix/default.nix
+++ b/pkgs/applications/terminal-emulators/tilix/default.nix
@@ -17,6 +17,7 @@
 , wrapGAppsHook
 , libunwind
 , appstream
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -66,6 +67,8 @@ stdenv.mkDerivation rec {
       --replace "Exec=tilix" "Exec=$out/bin/tilix"
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.tilix;
+
   meta = with lib; {
     description = "Tiling terminal emulator following the Gnome Human Interface Guidelines";
     homepage = "https://gnunn1.github.io/tilix-web";
diff --git a/pkgs/applications/terminal-emulators/wayst/default.nix b/pkgs/applications/terminal-emulators/wayst/default.nix
index f3751e39881..eeb258f48c4 100644
--- a/pkgs/applications/terminal-emulators/wayst/default.nix
+++ b/pkgs/applications/terminal-emulators/wayst/default.nix
@@ -2,6 +2,7 @@
 , lib
 , fetchFromGitHub
 , pkg-config
+, nixosTests
 , freetype
 , fontconfig
 , libGL
@@ -78,6 +79,8 @@ stdenv.mkDerivation rec {
     install -D icons/wayst.svg $out/share/icons/hicolor/scalable/apps/wayst.svg
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.wayst;
+
   meta = with lib; {
     description = "A simple terminal emulator";
     mainProgram = "wayst";
diff --git a/pkgs/applications/terminal-emulators/wezterm/default.nix b/pkgs/applications/terminal-emulators/wezterm/default.nix
index fc12b1addf6..6d1276035c1 100644
--- a/pkgs/applications/terminal-emulators/wezterm/default.nix
+++ b/pkgs/applications/terminal-emulators/wezterm/default.nix
@@ -22,11 +22,12 @@
 , Cocoa
 , Foundation
 , libiconv
+, nixosTests
 }:
 
 rustPlatform.buildRustPackage rec {
   pname = "wezterm";
-  version = "20220101-133340-7edc5b5a";
+  version = "20220319-142410-0fcdea07";
 
   outputs = [ "out" "terminfo" ];
 
@@ -35,7 +36,7 @@ rustPlatform.buildRustPackage rec {
     repo = pname;
     rev = version;
     fetchSubmodules = true;
-    sha256 = "sha256-UZCvKbZdZ7K4RtvVLmr44M612tqd4rkrjF2tys0JHNM=";
+    sha256 = "sha256-KmIlfzSbVY003WesodN5srJ7qEQaU93izmrZW1MobCo=";
   };
 
   postPatch = ''
@@ -45,7 +46,7 @@ rustPlatform.buildRustPackage rec {
     rm -r wezterm-ssh/tests
   '';
 
-  cargoSha256 = "1imil15n9mf1r71qdp4cb4q7kzrrc2cspml0d54825yqaq8vjhsc";
+  cargoSha256 = "sha256-+Iu6/pd14O1QIsLkHe7fTP30XyI+8J0GiRY8cnRPS5I=";
 
   nativeBuildInputs = [
     pkg-config
@@ -99,6 +100,8 @@ rustPlatform.buildRustPackage rec {
     ln -s $out/bin/{wezterm,wezterm-mux-server,wezterm-gui,strip-ansi-escapes} "$OUT_APP"
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.wezterm;
+
   meta = with lib; {
     description = "A GPU-accelerated cross-platform terminal emulator and multiplexer written by @wez and implemented in Rust";
     homepage = "https://wezfurlong.org/wezterm";
diff --git a/pkgs/applications/terminal-emulators/xterm/default.nix b/pkgs/applications/terminal-emulators/xterm/default.nix
index 3530b2d1d2f..278332ca6a3 100644
--- a/pkgs/applications/terminal-emulators/xterm/default.nix
+++ b/pkgs/applications/terminal-emulators/xterm/default.nix
@@ -76,7 +76,10 @@ stdenv.mkDerivation rec {
   '';
 
   passthru = {
-    tests = { inherit (nixosTests) xterm; };
+    tests = {
+      customTest = nixosTests.xterm;
+      standardTest = nixosTests.terminal-emulators.xterm;
+    };
 
     updateScript = let
       # Tags that end in letters are unstable
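All of the passthru.tests.test additions in this commit follow one convention: the package points at its entry in the shared nixos/tests/terminal-emulators.nix test, so the VM test is reachable straight from the package attribute (e.g. tilix.tests.test). A minimal sketch of the pattern, using a hypothetical some-terminal package:

  { lib, stdenv, fetchurl, nixosTests }:

  stdenv.mkDerivation rec {
    pname = "some-terminal";          # hypothetical package name
    version = "1.0";
    src = fetchurl {
      url = "https://example.org/${pname}-${version}.tar.gz";
      sha256 = lib.fakeSha256;        # placeholder hash
    };

    # Wire up the shared NixOS VM test so it can be built as
    # some-terminal.tests.test (passthru attributes are exposed on the
    # derivation), letting CI rebuild the test whenever the package changes.
    passthru.tests.test = nixosTests.terminal-emulators.some-terminal;

    meta.description = "Placeholder used only to illustrate the passthru.tests pattern";
  }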
diff --git a/pkgs/applications/version-management/gitlab/data.json b/pkgs/applications/version-management/gitlab/data.json
index 462ff47afd7..73f84f7e9ef 100644
--- a/pkgs/applications/version-management/gitlab/data.json
+++ b/pkgs/applications/version-management/gitlab/data.json
@@ -1,14 +1,14 @@
 {
-  "version": "14.8.2",
-  "repo_hash": "1pl528qxsbg75l5nny7cw8hcsd0zs50hhn0ngdrf3gjpd6y7pzcc",
-  "yarn_hash": "0dlhslkhiha4jyfzm0k8i9cgwdk12r5m67i2rznxbrkl38gk9c1x",
+  "version": "14.8.4",
+  "repo_hash": "0ra4d324all26crz84iys9xb40ykpiaqj4z2790zaw1s45wakmgj",
+  "yarn_hash": "106js1j6wii2axh1dxvlfr7mqhvsnsb5qs0danp9c3h1ihd4nz91",
   "owner": "gitlab-org",
   "repo": "gitlab",
-  "rev": "v14.8.2-ee",
+  "rev": "v14.8.4-ee",
   "passthru": {
-    "GITALY_SERVER_VERSION": "14.8.2",
+    "GITALY_SERVER_VERSION": "14.8.4",
     "GITLAB_PAGES_VERSION": "1.54.0",
     "GITLAB_SHELL_VERSION": "13.23.2",
-    "GITLAB_WORKHORSE_VERSION": "14.8.2"
+    "GITLAB_WORKHORSE_VERSION": "14.8.4"
   }
 }
diff --git a/pkgs/applications/version-management/gitlab/gitaly/default.nix b/pkgs/applications/version-management/gitlab/gitaly/default.nix
index b5a05dde1b4..877e0ace9fa 100644
--- a/pkgs/applications/version-management/gitlab/gitaly/default.nix
+++ b/pkgs/applications/version-management/gitlab/gitaly/default.nix
@@ -11,7 +11,7 @@ let
     gemdir = ./.;
   };
 
-  version = "14.8.2";
+  version = "14.8.4";
   gitaly_package = "gitlab.com/gitlab-org/gitaly/v${lib.versions.major version}";
 in
 
@@ -23,7 +23,7 @@ buildGoModule {
     owner = "gitlab-org";
     repo = "gitaly";
     rev = "v${version}";
-    sha256 = "sha256-GgQscKxxYpvzU2M99gmvGj0HM/oD+2Ke24FRzUxv6HM=";
+    sha256 = "sha256-3doXqYj1XsOifAr78ds5ioa6gUfw8uyUwn7JzqlMVSE=";
   };
 
   vendorSha256 = "sha256-Qw9/nlo1eB5dPcldXe9doy4QA4DDVUDad3o4kbdNu34=";
diff --git a/pkgs/applications/version-management/gitlab/gitlab-workhorse/default.nix b/pkgs/applications/version-management/gitlab/gitlab-workhorse/default.nix
index 76273acdff2..3533bccea98 100644
--- a/pkgs/applications/version-management/gitlab/gitlab-workhorse/default.nix
+++ b/pkgs/applications/version-management/gitlab/gitlab-workhorse/default.nix
@@ -5,7 +5,7 @@ in
 buildGoModule rec {
   pname = "gitlab-workhorse";
 
-  version = "14.8.2";
+  version = "14.8.4";
 
   src = fetchFromGitLab {
     owner = data.owner;
diff --git a/pkgs/applications/version-management/meld/default.nix b/pkgs/applications/version-management/meld/default.nix
index bef284ea534..462744641e3 100644
--- a/pkgs/applications/version-management/meld/default.nix
+++ b/pkgs/applications/version-management/meld/default.nix
@@ -1,5 +1,6 @@
 { lib
 , fetchurl
+, fetchpatch
 , gettext
 , itstool
 , python3
@@ -27,6 +28,16 @@ python3.pkgs.buildPythonApplication rec {
     sha256 = "cP6Y65Ms4h1nFw47D2pzF+gT6GLemJM+pROYLpoDMgI=";
   };
 
+  patches = [
+    # Pull upstream fix for meson-0.60:
+    #  https://gitlab.gnome.org/GNOME/meld/-/merge_requests/78
+    (fetchpatch {
+      name = "meson-0.60.patch";
+      url  = "https://gitlab.gnome.org/GNOME/meld/-/commit/cc7746c141d976a4779cf868774fae1fe7627a6d.patch";
+      sha256 = "sha256-4uJZyF00Z6svzrOebByZV1hutCZRkIQYC4rUxQr5fdQ=";
+    })
+  ];
+
   nativeBuildInputs = [
     meson
     ninja
diff --git a/pkgs/applications/virtualization/distrobox/default.nix b/pkgs/applications/virtualization/distrobox/default.nix
index 8e9ea890a9b..e67509c54f8 100644
--- a/pkgs/applications/virtualization/distrobox/default.nix
+++ b/pkgs/applications/virtualization/distrobox/default.nix
@@ -2,13 +2,13 @@
 
 stdenvNoCC.mkDerivation rec {
   pname = "distrobox";
-  version = "1.2.13";
+  version = "1.2.14";
 
   src = fetchFromGitHub {
     owner = "89luca89";
     repo = pname;
     rev = version;
-    sha256 = "047mrhsfi88mgwylnnyxg6xa7hjjrajn2pf7vfmb6161myqybvfy";
+    sha256 = "sha256-gHKyuIL4K/SLBJw8xNuPdNifDcHI91AFTiHaiv38gus=";
   };
 
   dontConfigure = true;
diff --git a/pkgs/applications/virtualization/docker/default.nix b/pkgs/applications/virtualization/docker/default.nix
index c479542f771..87bb16ab833 100644
--- a/pkgs/applications/virtualization/docker/default.nix
+++ b/pkgs/applications/virtualization/docker/default.nix
@@ -10,7 +10,7 @@ rec {
       , containerdRev, containerdSha256
       , tiniRev, tiniSha256, buildxSupport ? true, composeSupport ? true
       # package dependencies
-      , stdenv, fetchFromGitHub, buildGoPackage
+      , stdenv, fetchFromGitHub, fetchpatch, buildGoPackage
       , makeWrapper, installShellFiles, pkg-config, glibc
       , go-md2man, go, containerd, runc, docker-proxy, tini, libtool
       , sqlite, iproute2, lvm2, systemd, docker-buildx, docker-compose_2
@@ -79,6 +79,16 @@ rec {
 
       extraUserPath = optionals (stdenv.isLinux && !clientOnly) (makeBinPath [ rootlesskit slirp4netns fuse-overlayfs ]);
 
+      patches = [
+        # This patch incorporates code from a PR that fixes using buildkit with the ZFS graph driver.
+        # It can be removed once a release incorporating this fix is available.
+        (fetchpatch {
+          name = "buildkit-zfs.patch";
+          url = "https://github.com/moby/moby/pull/43136.patch";
+          sha256 = "1WZfpVnnqFwLMYqaHLploOodls0gHF8OCp7MrM26iX8=";
+        })
+      ];
+
       postPatch = ''
         patchShebangs hack/make.sh hack/make/
       '';
diff --git a/pkgs/applications/virtualization/singularity/default.nix b/pkgs/applications/virtualization/singularity/default.nix
index 6589cccfe2f..5ec5b19ce04 100644
--- a/pkgs/applications/virtualization/singularity/default.nix
+++ b/pkgs/applications/virtualization/singularity/default.nix
@@ -15,11 +15,11 @@ with lib;
 
 buildGoPackage rec {
   pname = "singularity";
-  version = "3.8.6";
+  version = "3.8.7";
 
   src = fetchurl {
     url = "https://github.com/hpcng/singularity/releases/download/v${version}/singularity-${version}.tar.gz";
-    sha256 = "sha256-u1o7dnCsnHpLPOWyyfPWtb5g4hsI0zjJ39q7eyqZ9Sg=";
+    sha256 = "sha256-Myny5YP4SoNDyywDgKHWy86vrn0eYztcvK33FD6shZs=";
   };
 
   goPackagePath = "github.com/sylabs/singularity";
diff --git a/pkgs/applications/window-managers/i3/wsr.nix b/pkgs/applications/window-managers/i3/wsr.nix
index 6af5717916b..6e92fba07d1 100644
--- a/pkgs/applications/window-managers/i3/wsr.nix
+++ b/pkgs/applications/window-managers/i3/wsr.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "i3wsr";
-  version = "2.0.1";
+  version = "2.1.0";
 
   src = fetchFromGitHub {
     owner = "roosta";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-JzQWfC0kmnMArpIAE5fgb3YLmXktSCH5aUdrQH9pCbo=";
+    sha256 = "sha256-mwPU700eqyFYihWP1m3y56zXnX16sjSNzB6XNlCttBU=";
   };
 
-  cargoSha256 = "sha256-ZvSdJLaw1nfaqpTBKIiHiXvNFSZhsmLk0PBrV6ykv/w=";
+  cargoSha256 = "sha256-f0Yby/2g7apkqx0iCcd/QkQgMVYZDUQ1vWw8RCXQ9Z4=";
 
   nativeBuildInputs = [ python3 ];
   buildInputs = [ libxcb ];
diff --git a/pkgs/data/themes/yaru/default.nix b/pkgs/data/themes/yaru/default.nix
index eb62da715ce..67762e747f4 100644
--- a/pkgs/data/themes/yaru/default.nix
+++ b/pkgs/data/themes/yaru/default.nix
@@ -16,13 +16,13 @@
 
 stdenv.mkDerivation rec {
   pname = "yaru";
-  version = "22.04.1";
+  version = "22.04.2";
 
   src = fetchFromGitHub {
     owner = "ubuntu";
     repo = "yaru";
     rev = version;
-    sha256 = "sha256-5mB5eTIPw4CqYSQm675MKbRwsYLpg+5WJrLDkbc6nKs=";
+    sha256 = "sha256-oW5OOJPhC3OB3GIQWTQxPgqE7p4bAO1TyVbyKUHnyD0=";
   };
 
   nativeBuildInputs = [ meson sassc pkg-config glib ninja python3 ];
diff --git a/pkgs/desktops/enlightenment/terminology/default.nix b/pkgs/desktops/enlightenment/terminology/default.nix
index a2e4f717f48..7b3ffe2f112 100644
--- a/pkgs/desktops/enlightenment/terminology/default.nix
+++ b/pkgs/desktops/enlightenment/terminology/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchurl, meson, ninja, pkg-config, python3, efl }:
+{ lib, stdenv, fetchurl, meson, ninja, pkg-config, python3, efl, nixosTests }:
 
 stdenv.mkDerivation rec {
   pname = "terminology";
@@ -24,6 +24,8 @@ stdenv.mkDerivation rec {
     patchShebangs data/colorschemes/*.py
   '';
 
+  passthru.tests.test = nixosTests.terminal-emulators.terminology;
+
   meta = with lib; {
     description = "Powerful terminal emulator based on EFL";
     homepage = "https://www.enlightenment.org/about-terminology";
diff --git a/pkgs/desktops/gnome/core/gnome-terminal/default.nix b/pkgs/desktops/gnome/core/gnome-terminal/default.nix
index 69fb69261f9..588d6e3ced3 100644
--- a/pkgs/desktops/gnome/core/gnome-terminal/default.nix
+++ b/pkgs/desktops/gnome/core/gnome-terminal/default.nix
@@ -23,6 +23,7 @@
 , pcre2
 , libxslt
 , docbook-xsl-nons
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -82,6 +83,8 @@ stdenv.mkDerivation rec {
     };
   };
 
+  passthru.tests.test = nixosTests.terminal-emulators.gnome-terminal;
+
   meta = with lib; {
     description = "The GNOME Terminal Emulator";
     homepage = "https://wiki.gnome.org/Apps/Terminal";
diff --git a/pkgs/desktops/lxqt/qterminal/default.nix b/pkgs/desktops/lxqt/qterminal/default.nix
index e93e404762a..9412506385b 100644
--- a/pkgs/desktops/lxqt/qterminal/default.nix
+++ b/pkgs/desktops/lxqt/qterminal/default.nix
@@ -8,6 +8,7 @@
 , qttools
 , qtx11extras
 , lxqtUpdateScript
+, nixosTests
 }:
 
 mkDerivation rec {
@@ -35,6 +36,8 @@ mkDerivation rec {
 
   passthru.updateScript = lxqtUpdateScript { inherit pname version src; };
 
+  passthru.tests.test = nixosTests.terminal-emulators.qterminal;
+
   meta = with lib; {
     homepage = "https://github.com/lxqt/qterminal";
     description = "A lightweight Qt-based terminal emulator";
diff --git a/pkgs/desktops/mate/mate-terminal/default.nix b/pkgs/desktops/mate/mate-terminal/default.nix
index 19fa5697f96..fcae87a869e 100644
--- a/pkgs/desktops/mate/mate-terminal/default.nix
+++ b/pkgs/desktops/mate/mate-terminal/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchurl, pkg-config, gettext, itstool, libxml2, mate-desktop, dconf, vte, pcre2, wrapGAppsHook, mateUpdateScript }:
+{ lib, stdenv, fetchurl, pkg-config, gettext, itstool, libxml2, mate-desktop, dconf, vte, pcre2, wrapGAppsHook, mateUpdateScript, nixosTests }:
 
 stdenv.mkDerivation rec {
   pname = "mate-terminal";
@@ -28,6 +28,8 @@ stdenv.mkDerivation rec {
 
   passthru.updateScript = mateUpdateScript { inherit pname version; };
 
+  passthru.tests.test = nixosTests.terminal-emulators.mate-terminal;
+
   meta = with lib; {
     description = "MATE desktop terminal emulator";
     homepage = "https://mate-desktop.org";
diff --git a/pkgs/desktops/xfce/applications/xfce4-terminal/default.nix b/pkgs/desktops/xfce/applications/xfce4-terminal/default.nix
index 4d887f96ffd..65611f73a1d 100644
--- a/pkgs/desktops/xfce/applications/xfce4-terminal/default.nix
+++ b/pkgs/desktops/xfce/applications/xfce4-terminal/default.nix
@@ -1,4 +1,4 @@
-{ lib, mkXfceDerivation, gtk3, libxfce4ui, vte, xfconf, pcre2, libxslt, docbook_xml_dtd_45, docbook_xsl }:
+{ lib, mkXfceDerivation, gtk3, libxfce4ui, vte, xfconf, pcre2, libxslt, docbook_xml_dtd_45, docbook_xsl, nixosTests }:
 
 mkXfceDerivation {
   category = "apps";
@@ -11,6 +11,8 @@ mkXfceDerivation {
 
   buildInputs = [ gtk3 libxfce4ui vte xfconf pcre2 ];
 
+  passthru.tests.test = nixosTests.terminal-emulators.xfce4-terminal;
+
   meta = with lib; {
     description = "A modern terminal emulator";
     maintainers = with maintainers; [ ] ++ teams.xfce.members;
diff --git a/pkgs/development/haskell-modules/make-package-set.nix b/pkgs/development/haskell-modules/make-package-set.nix
index 86f29e73113..80dc94af4df 100644
--- a/pkgs/development/haskell-modules/make-package-set.nix
+++ b/pkgs/development/haskell-modules/make-package-set.nix
@@ -384,6 +384,18 @@ in package-set { inherit pkgs lib callPackage; } self // {
         # for the "shellFor" environment (ensuring that any test/benchmark
         # dependencies for "foo" will be available within the nix-shell).
       , genericBuilderArgsModifier ? (args: args)
+
+        # Extra dependencies, in the form of cabal2nix build attributes.
+        #
+        # An example use case is when you have Haskell scripts that use
+        # libraries that don't occur in your packages' dependencies.
+        #
+        # Example:
+        #
+        #   extraDependencies = p: {
+        #     libraryHaskellDepends = [ p.releaser ];
+        #   };
+      , extraDependencies ? p: {}
       , ...
       } @ args:
       let
@@ -474,7 +486,7 @@ in package-set { inherit pkgs lib callPackage; } self // {
         # See the Note in `zipperCombinedPkgs` for what gets filtered out from
         # each of these dependency lists.
         packageInputs =
-          pkgs.lib.zipAttrsWith (_name: zipperCombinedPkgs) cabalDepsForSelected;
+          pkgs.lib.zipAttrsWith (_name: zipperCombinedPkgs) (cabalDepsForSelected ++ [ (extraDependencies self) ]);
 
         # A attribute set to pass to `haskellPackages.mkDerivation`.
         #
@@ -514,7 +526,7 @@ in package-set { inherit pkgs lib callPackage; } self // {
         # pkgWithCombinedDepsDevDrv :: Derivation
         pkgWithCombinedDepsDevDrv = pkgWithCombinedDeps.envFunc { inherit withHoogle; };
 
-        mkDerivationArgs = builtins.removeAttrs args [ "genericBuilderArgsModifier" "packages" "withHoogle" "doBenchmark" ];
+        mkDerivationArgs = builtins.removeAttrs args [ "genericBuilderArgsModifier" "packages" "withHoogle" "doBenchmark" "extraDependencies" ];
 
       in pkgWithCombinedDepsDevDrv.overrideAttrs (old: mkDerivationArgs // {
         nativeBuildInputs = old.nativeBuildInputs ++ mkDerivationArgs.nativeBuildInputs or [];
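For reference, a hedged sketch of how the new extraDependencies argument might be used from a shell.nix (my-project is a placeholder name; releaser comes from the example in the comment above):

  with import <nixpkgs> { };

  haskellPackages.shellFor {
    packages = p: [ p.my-project ];   # placeholder local package
    # Make libraries used only by standalone scripts available in the shell,
    # even though they are not declared in my-project.cabal; the attribute
    # names follow the cabal2nix build-attribute shape.
    extraDependencies = p: {
      libraryHaskellDepends = [ p.releaser ];
    };
  }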
diff --git a/pkgs/development/haskell-modules/package-list.nix b/pkgs/development/haskell-modules/package-list.nix
index 64f4be3a772..688844bcc46 100644
--- a/pkgs/development/haskell-modules/package-list.nix
+++ b/pkgs/development/haskell-modules/package-list.nix
@@ -1,10 +1,16 @@
 { runCommand, haskellPackages, lib, all-cabal-hashes, writeShellScript }:
 let
+  # Checks whether the version looks like a Haskell PVP version, which is the
+  # format Hackage enforces. This returns false if the version string is empty
+  # or if we've overridden the package to ship an unstable version (sadly
+  # there's no good way to show something useful on Hackage in that case).
+  isPvpVersion = v: builtins.match "([0-9]+)(\\.[0-9]+)*" v != null;
+
   pkgLine = name: pkg:
     let
       version = pkg.version or "";
     in
-    if version != "" then
+    if isPvpVersion version then
       ''"${name}","${version}","http://hydra.nixos.org/job/nixpkgs/trunk/haskellPackages.${name}.x86_64-linux"''
     else "";
   all-haskellPackages = builtins.toFile "all-haskellPackages" (lib.concatStringsSep "\n" (lib.filter (x: x != "") (lib.mapAttrsToList pkgLine haskellPackages)));
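As a quick illustration of what the PVP check above accepts (results as produced by builtins.match, which must match the whole string):

  builtins.match "([0-9]+)(\\.[0-9]+)*" "1.2.3"        # => [ "1" ".3" ]  -> listed
  builtins.match "([0-9]+)(\\.[0-9]+)*" ""             # => null          -> skipped
  builtins.match "([0-9]+)(\\.[0-9]+)*" "1.0-unstable" # => null          -> skipped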
diff --git a/pkgs/development/libraries/exiv2/default.nix b/pkgs/development/libraries/exiv2/default.nix
index f352714d237..7a79e597beb 100644
--- a/pkgs/development/libraries/exiv2/default.nix
+++ b/pkgs/development/libraries/exiv2/default.nix
@@ -52,6 +52,7 @@ stdenv.mkDerivation rec {
   cmakeFlags = [
     "-DEXIV2_ENABLE_NLS=ON"
     "-DEXIV2_BUILD_DOC=ON"
+    "-DEXIV2_ENABLE_BMFF=ON"
   ];
 
   buildFlags = [
diff --git a/pkgs/development/libraries/gdal/default.nix b/pkgs/development/libraries/gdal/default.nix
index 3e5ae303dd7..bfa1c75b483 100644
--- a/pkgs/development/libraries/gdal/default.nix
+++ b/pkgs/development/libraries/gdal/default.nix
@@ -7,13 +7,13 @@ with lib;
 
 stdenv.mkDerivation rec {
   pname = "gdal";
-  version = "3.4.1";
+  version = "3.4.2";
 
   src = fetchFromGitHub {
     owner = "OSGeo";
     repo = "gdal";
     rev = "v${version}";
-    sha256 = "11rjdaxmsp9n3r9xhmgd7ksy8bh5fazwsxdj0xvl4hsy6bcn4n97";
+    sha256 = "sha256-bE55VV0SrG8nxCLdpODRalnuAkn+olRdMLUjduavj6M=";
   };
 
   sourceRoot = "source/gdal";
diff --git a/pkgs/development/libraries/libqalculate/default.nix b/pkgs/development/libraries/libqalculate/default.nix
index bb70ffcd32a..55132d15523 100644
--- a/pkgs/development/libraries/libqalculate/default.nix
+++ b/pkgs/development/libraries/libqalculate/default.nix
@@ -3,13 +3,13 @@
 
 stdenv.mkDerivation rec {
   pname = "libqalculate";
-  version = "4.0.0";
+  version = "4.1.0";
 
   src = fetchFromGitHub {
     owner = "qalculate";
     repo = "libqalculate";
     rev = "v${version}";
-    sha256 = "sha256-aRHwkdAbM164diIAIyBp1Kt6u/GLyCWTtwF4eFaWbGU=";
+    sha256 = "sha256-P3mb5HEj9gHq2mABdIRxF6ZukrPd70sy0DRLT0qKDqk=";
   };
 
   outputs = [ "out" "dev" "doc" ];
diff --git a/pkgs/development/libraries/mysocketw/default.nix b/pkgs/development/libraries/mysocketw/default.nix
index 62bd7cea1a1..32987d64992 100644
--- a/pkgs/development/libraries/mysocketw/default.nix
+++ b/pkgs/development/libraries/mysocketw/default.nix
@@ -1,27 +1,38 @@
-{ lib, stdenv, fetchFromGitHub, openssl, cmake }:
+{ lib
+, stdenv
+, fetchFromGitHub
+, openssl
+, cmake
+}:
 
 stdenv.mkDerivation rec {
   pname = "mysocketw";
-  version = "3.10.27";
+  version = "3.11.0";
 
   src = fetchFromGitHub {
     owner = "RigsOfRods";
     repo = "socketw";
     rev = version;
-    sha256 = "0xqcgwb1lyc2d8834sq5adbmggyn6vvb26jw20862sxa15j0qfd4";
+    hash = "sha256-mpfhmKE2l59BllkOjmURIfl17lAakXpmGh2x9SFSaAo=";
   };
 
-  nativeBuildInputs = [ cmake ];
-  buildInputs = [ openssl ];
+  nativeBuildInputs = [
+    cmake
+  ];
+
+  buildInputs = [
+    openssl
+  ];
 
   postPatch = lib.optionalString stdenv.isDarwin ''
     substituteInPlace src/Makefile \
         --replace -Wl,-soname, -Wl,-install_name,$out/lib/
   '';
 
-  meta = {
+  meta = with lib; {
     description = "Cross platform (Linux/FreeBSD/Unix/Win32) streaming socket C++";
-    license = lib.licenses.lgpl21Plus;
-    platforms = lib.platforms.all;
+    homepage = "https://github.com/RigsOfRods/socketw";
+    license = licenses.lgpl21Plus;
+    maintainers = with maintainers; [ ];
   };
 }
diff --git a/pkgs/development/libraries/science/math/suitesparse-graphblas/default.nix b/pkgs/development/libraries/science/math/suitesparse-graphblas/default.nix
index bf5251d1aed..eea7c146ca3 100644
--- a/pkgs/development/libraries/science/math/suitesparse-graphblas/default.nix
+++ b/pkgs/development/libraries/science/math/suitesparse-graphblas/default.nix
@@ -6,7 +6,7 @@
 
 stdenv.mkDerivation rec {
   pname = "suitesparse-graphblas";
-  version = "6.2.2";
+  version = "6.2.5";
 
   outputs = [ "out" "dev" ];
 
@@ -14,7 +14,7 @@ stdenv.mkDerivation rec {
     owner = "DrTimothyAldenDavis";
     repo = "GraphBLAS";
     rev = "v${version}";
-    sha256 = "sha256-uSPE7uFiG4xbsAeo/UmOP5Ns+3yZ7kKL2bNxzd8mzP8=";
+    sha256 = "sha256-N4yFlTxV+lVz70PSHPuWEEFLp0dpsImXYDLUYEo2JQI=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/libraries/vte/default.nix b/pkgs/development/libraries/vte/default.nix
index eae934a4ebb..aae30390ee4 100644
--- a/pkgs/development/libraries/vte/default.nix
+++ b/pkgs/development/libraries/vte/default.nix
@@ -22,6 +22,7 @@
 , icu
 , systemd
 , systemdSupport ? stdenv.hostPlatform.isLinux
+, nixosTests
 }:
 
 stdenv.mkDerivation rec {
@@ -91,6 +92,9 @@ stdenv.mkDerivation rec {
       packageName = pname;
       versionPolicy = "odd-unstable";
     };
+    tests = {
+      inherit (nixosTests.terminal-emulators) gnome-terminal lxterminal mlterm roxterm sakura stupidterm terminator termite xfce4-terminal;
+    };
   };
 
   meta = with lib; {
diff --git a/pkgs/development/libraries/yder/default.nix b/pkgs/development/libraries/yder/default.nix
index 49544a37468..9dd55482300 100644
--- a/pkgs/development/libraries/yder/default.nix
+++ b/pkgs/development/libraries/yder/default.nix
@@ -11,13 +11,13 @@
 
 stdenv.mkDerivation rec {
   pname = "yder";
-  version = "1.4.14";
+  version = "1.4.15";
 
   src = fetchFromGitHub {
     owner = "babelouest";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-4FSUBFqrxTbqg2EKYuXv4gUeE40ViNZRk5gHv+C2p9o=";
+    sha256 = "sha256-hPAL1UngodNbQCCdKulaF5faI0JOjmWdz3q8oyPH7C4=";
   };
 
   patches = [
diff --git a/pkgs/development/python-modules/ailment/default.nix b/pkgs/development/python-modules/ailment/default.nix
index 94c47ab62a0..ef606a027d0 100644
--- a/pkgs/development/python-modules/ailment/default.nix
+++ b/pkgs/development/python-modules/ailment/default.nix
@@ -7,7 +7,7 @@
 
 buildPythonPackage rec {
   pname = "ailment";
-  version = "9.1.11752";
+  version = "9.1.12332";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -16,7 +16,7 @@ buildPythonPackage rec {
     owner = "angr";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-UbcPxYEyuX8W0uZXeCu00yBshdcPBAQKzZqhAYXTf+8=";
+    hash = "sha256-qWKvNhiOAonUi0qpOWtwbNZa2lgBQ+gaGrAHMgDdr4Q=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/aiobotocore/default.nix b/pkgs/development/python-modules/aiobotocore/default.nix
index ef39f451d59..01066c127be 100644
--- a/pkgs/development/python-modules/aiobotocore/default.nix
+++ b/pkgs/development/python-modules/aiobotocore/default.nix
@@ -10,12 +10,12 @@
 
 buildPythonPackage rec {
   pname = "aiobotocore";
-  version = "2.1.1";
+  version = "2.1.2";
   disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-2+mrmXhRwkWLB6hfaCvizPNdZ51d4Pj1cSKfdArXunE=";
+    sha256 = "sha256-AP1/Q8wEhNjtJ0/QvkkqoWp/6medvqlqYCu3IspMLSI=";
   };
 
   # relax version constraints: aiobotocore works with newer botocore versions
diff --git a/pkgs/development/python-modules/aioridwell/default.nix b/pkgs/development/python-modules/aioridwell/default.nix
index 7c3def1ec55..2b4cb28bbb5 100644
--- a/pkgs/development/python-modules/aioridwell/default.nix
+++ b/pkgs/development/python-modules/aioridwell/default.nix
@@ -17,7 +17,7 @@
 
 buildPythonPackage rec {
   pname = "aioridwell";
-  version = "2021.12.2";
+  version = "2022.03.0";
   format = "pyproject";
 
   disabled = pythonOlder "3.8";
@@ -26,7 +26,7 @@ buildPythonPackage rec {
     owner = "bachya";
     repo = pname;
     rev = version;
-    sha256 = "sha256-QFUXWleHRMBgaRsMNt2xFb3XcbCNI2kKQHKCBrUuG6Q=";
+    hash = "sha256-UiHT1YbBb9UTughVw2oJxRtvhUDVqQWqEcXMEXwy2cI=";
   };
 
   nativeBuildInputs = [
@@ -49,12 +49,6 @@ buildPythonPackage rec {
     types-pytz
   ];
 
-  postPatch = ''
-    substituteInPlace pyproject.toml \
-      --replace 'titlecase = "^2.3"' 'titlecase = "*"' \
-      --replace 'pytz = "^2021.3"' 'pytz = "*"'
-  '';
-
   disabledTests = [
     # AssertionError: assert datetime.date(...
     "test_get_next_pickup_event"
diff --git a/pkgs/development/python-modules/angr/default.nix b/pkgs/development/python-modules/angr/default.nix
index 93a4b22f5dd..1881da62f50 100644
--- a/pkgs/development/python-modules/angr/default.nix
+++ b/pkgs/development/python-modules/angr/default.nix
@@ -46,7 +46,7 @@ in
 
 buildPythonPackage rec {
   pname = "angr";
-  version = "9.1.11752";
+  version = "9.1.12332";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -55,7 +55,7 @@ buildPythonPackage rec {
     owner = pname;
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-4DUM1c3M/naJFqN/gdrX/NnJrY3ElUEOQ34cwcpSC+s=";
+    hash = "sha256-GaW1XyFOnjU28HqptFC6+Fe41zYZMR716Nsq0dPy660=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/angrop/default.nix b/pkgs/development/python-modules/angrop/default.nix
index 21eeeb2369d..8164190cd49 100644
--- a/pkgs/development/python-modules/angrop/default.nix
+++ b/pkgs/development/python-modules/angrop/default.nix
@@ -9,7 +9,7 @@
 
 buildPythonPackage rec {
   pname = "angrop";
-  version = "9.1.11752";
+  version = "9.1.12332";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -18,7 +18,7 @@ buildPythonPackage rec {
     owner = "angr";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-nZGAuWp07VMpOvqw38FGSiUhaFjJOfCzOaam4Ex7qbY=";
+    hash = "sha256-lhwlZ7eHaEMaTW7c+WCRSeGSIQ5IeEx6XALyYJH+Ey0=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/archinfo/default.nix b/pkgs/development/python-modules/archinfo/default.nix
index add8e72d8e7..f9affcddd12 100644
--- a/pkgs/development/python-modules/archinfo/default.nix
+++ b/pkgs/development/python-modules/archinfo/default.nix
@@ -1,6 +1,7 @@
 { lib
 , buildPythonPackage
 , fetchFromGitHub
+, fetchpatch
 , pytestCheckHook
 , nose
 , pythonOlder
@@ -8,7 +9,7 @@
 
 buildPythonPackage rec {
   pname = "archinfo";
-  version = "9.1.11752";
+  version = "9.1.12332";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -17,7 +18,7 @@ buildPythonPackage rec {
     owner = "angr";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-D1YssHa14q2jxn4HtOYZlTdwGPkiiMhWuOh08fj87ic=";
+    hash = "sha256-nv/hwQZgKv/cM8fF6GqI8zY9GAe8aCZ/AGFOmhz+bMM=";
   };
 
   checkInputs = [
@@ -25,6 +26,15 @@ buildPythonPackage rec {
     pytestCheckHook
   ];
 
+  patches = [
+    # Make archinfo import without installing pyvex, https://github.com/angr/archinfo/pull/113
+    (fetchpatch {
+      name = "fix-import-issue.patch";
+      url = "https://github.com/angr/archinfo/commit/d29c108f55ffd458ff1d3d65db2d651c76b19267.patch";
+      sha256 = "sha256-9vi0QyqQLIPQxFuB8qrpcnPXWOJ6d27/IXJE/Ui6HhM=";
+    })
+  ];
+
   pythonImportsCheck = [
     "archinfo"
   ];
diff --git a/pkgs/development/python-modules/argon2_cffi/default.nix b/pkgs/development/python-modules/argon2_cffi/default.nix
index 839429bc948..4ecf5fbc5f0 100644
--- a/pkgs/development/python-modules/argon2_cffi/default.nix
+++ b/pkgs/development/python-modules/argon2_cffi/default.nix
@@ -23,7 +23,8 @@ buildPythonPackage rec {
     sha256 = "d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b";
   };
 
-  propagatedBuildInputs = [ cffi six ] ++ lib.optional (!isPy3k) enum34;
+  propagatedBuildInputs = [ cffi six argon2-cffi-bindings ]
+    ++ lib.optional (!isPy3k) enum34;
 
   propagatedNativeBuildInputs = [
     argon2-cffi-bindings
diff --git a/pkgs/development/python-modules/azure-core/default.nix b/pkgs/development/python-modules/azure-core/default.nix
index 9a128d81348..b7d330e6eff 100644
--- a/pkgs/development/python-modules/azure-core/default.nix
+++ b/pkgs/development/python-modules/azure-core/default.nix
@@ -1,4 +1,4 @@
-{ lib, buildPythonPackage, fetchPypi, isPy27
+{ lib, stdenv, buildPythonPackage, fetchPypi, isPy27
 , aiodns
 , aiohttp
 , flask
@@ -51,7 +51,18 @@ buildPythonPackage rec {
 
   pytestFlagsArray = [ "tests/" ];
   # disable tests which touch network
-  disabledTests = [ "aiohttp" "multipart_send" "response" "request" "timeout" ];
+  disabledTests = [
+    "aiohttp"
+    "multipart_send"
+    "response"
+    "request"
+    "timeout"
+  # disable 8 tests failing on some darwin machines with errors:
+  # azure.core.polling.base_polling.BadStatus: Invalid return status 403 for 'GET' operation
+  # azure.core.exceptions.HttpResponseError: Operation returned an invalid status 'Forbidden'
+  ] ++ lib.optionals stdenv.isDarwin [
+    "location_polling_fail"
+  ];
   disabledTestPaths = [
     # requires testing modules which aren't published, and likely to create cyclic dependencies
     "tests/test_connection_string_parsing.py"
diff --git a/pkgs/development/python-modules/broadlink/default.nix b/pkgs/development/python-modules/broadlink/default.nix
index dc1e4f37f3c..ba635a39257 100644
--- a/pkgs/development/python-modules/broadlink/default.nix
+++ b/pkgs/development/python-modules/broadlink/default.nix
@@ -2,22 +2,26 @@
 , buildPythonPackage
 , fetchPypi
 , cryptography
+, pythonOlder
 }:
 
 buildPythonPackage rec {
   pname = "broadlink";
-  version = "0.18.0";
+  version = "0.18.1";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "c66b3e4a097d6549f0fcc9ccdf289bd88f73f647ad9449e1c4e2958301ad1b04";
+    hash = "sha256-KMfL1mY4th87gjPrdhvzQjdXucgwSChsykOCO3cPAD8=";
   };
 
   propagatedBuildInputs = [
     cryptography
   ];
 
-  # no tests available
+  # Module has no tests
   doCheck = false;
 
   pythonImportsCheck = [
@@ -28,5 +32,6 @@ buildPythonPackage rec {
     description = "Python API for controlling Broadlink IR controllers";
     homepage =  "https://github.com/mjg59/python-broadlink";
     license = licenses.mit;
+    maintainers = with maintainers; [ fab ];
   };
 }
diff --git a/pkgs/development/python-modules/claripy/default.nix b/pkgs/development/python-modules/claripy/default.nix
index 10f5762f0c4..9816ff3782b 100644
--- a/pkgs/development/python-modules/claripy/default.nix
+++ b/pkgs/development/python-modules/claripy/default.nix
@@ -13,7 +13,7 @@
 
 buildPythonPackage rec {
   pname = "claripy";
-  version = "9.1.11752";
+  version = "9.1.12332";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -22,7 +22,7 @@ buildPythonPackage rec {
     owner = "angr";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-Z50oKwS0MZVBEUeXfj9cgtPYXFAYf4i7QkgJiXdWrxo=";
+    sha256 = "sha256-YrR8OkDoop6kHAuk4cM4STYYOjjaMLZCQuE07/5IXqs=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/cle/default.nix b/pkgs/development/python-modules/cle/default.nix
index e1b1b3b0bf4..6c8126172c0 100644
--- a/pkgs/development/python-modules/cle/default.nix
+++ b/pkgs/development/python-modules/cle/default.nix
@@ -15,7 +15,7 @@
 
 let
   # The binaries follow the angr project's release cycle
-  version = "9.1.11752";
+  version = "9.1.12332";
 
   # Binary files from https://github.com/angr/binaries (only used for testing and only here)
   binaries = fetchFromGitHub {
@@ -37,7 +37,7 @@ buildPythonPackage rec {
     owner = "angr";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-pnbFnv/te7U2jB6gNRvE9DQssBkFsara1g6Gtqf+WVo=";
+    hash = "sha256-xcj6Skzzmw5g+0KsBMLNOhRyXQA7nbgnc9YyfJLteCM=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/cloudsmith-api/default.nix b/pkgs/development/python-modules/cloudsmith-api/default.nix
index 92e5a71ee80..03203550600 100644
--- a/pkgs/development/python-modules/cloudsmith-api/default.nix
+++ b/pkgs/development/python-modules/cloudsmith-api/default.nix
@@ -9,14 +9,14 @@
 
 buildPythonPackage rec {
   pname = "cloudsmith-api";
-  version = "1.33.7";
+  version = "1.42.3";
 
   format = "wheel";
 
   src = fetchPypi {
     pname = "cloudsmith_api";
     inherit format version;
-    sha256 = "sha256-KNm2O2kZg+YzjtebsBoL7BOHCuffDELXm2k8vIFtKdk=";
+    sha256 = "sha256-P0QuKkyFk3jvYJwtul0/eUTrDyj2QKAjU/Ac+4VCYYk=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/coqui-trainer/default.nix b/pkgs/development/python-modules/coqui-trainer/default.nix
index 5ff53b8eb75..de04e444e83 100644
--- a/pkgs/development/python-modules/coqui-trainer/default.nix
+++ b/pkgs/development/python-modules/coqui-trainer/default.nix
@@ -4,7 +4,7 @@
 
 , coqpit
 , fsspec
-, pytorch-bin
+, pytorch
 
 , pytestCheckHook
 , soundfile
@@ -14,7 +14,7 @@
 
 let
   pname = "coqui-trainer";
-  version = "0.0.4";
+  version = "0.0.5";
 in
 buildPythonPackage {
   inherit pname version;
@@ -23,21 +23,19 @@ buildPythonPackage {
   src = fetchFromGitHub {
     owner = "coqui-ai";
     repo = "Trainer";
-    # https://github.com/coqui-ai/Trainer/issues/4
-    rev = "776eba829231543d3207927fc69b321d121e527c";
-    hash = "sha256-ICveftJjBNsCgegTmd/ewd/Y6XGMg7YOvchx640RFPI=";
+    rev = "v${version}";
+    hash = "sha256-NsgCh+N2qWmRkTOjXqisVCP5aInH2zcNz6lsnIfVLiY=";
   };
 
   propagatedBuildInputs = [
     coqpit
     fsspec
-    pytorch-bin
+    pytorch
     soundfile
     tensorboardx
   ];
 
-  # tests are failing; tests require the clearml library
-  # https://github.com/coqui-ai/Trainer/issues/5
+  # There is only one test, and it requires training data from the internet
   doCheck = false;
 
   checkInputs = [
diff --git a/pkgs/development/python-modules/discogs-client/default.nix b/pkgs/development/python-modules/discogs-client/default.nix
index f4940cf7c61..0295735abd7 100644
--- a/pkgs/development/python-modules/discogs-client/default.nix
+++ b/pkgs/development/python-modules/discogs-client/default.nix
@@ -10,7 +10,7 @@
 
 buildPythonPackage rec {
   pname = "discogs-client";
-  version = "2.3.13";
+  version = "2.3.14";
   format = "setuptools";
 
   disabled = pythonOlder "3.7";
@@ -19,7 +19,7 @@ buildPythonPackage rec {
     owner = "joalla";
     repo = "discogs_client";
     rev = "v${version}";
-    sha256 = "sha256-TOja0pCJv8TAI0ns8M/tamZ5Pp8k5sSKDnvN4SeKtW8=";
+    sha256 = "sha256-HJxqTwZ9byjZxdftQPTkVo0Ufio9gNGH69q5Q2gYR00=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/elementpath/default.nix b/pkgs/development/python-modules/elementpath/default.nix
index 4dc34f92365..e756701ab4e 100644
--- a/pkgs/development/python-modules/elementpath/default.nix
+++ b/pkgs/development/python-modules/elementpath/default.nix
@@ -6,7 +6,7 @@
 
 buildPythonPackage rec {
   pname = "elementpath";
-  version = "2.4.0";
+  version = "2.5.0";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -15,7 +15,7 @@ buildPythonPackage rec {
     owner = "sissaschool";
     repo = "elementpath";
     rev = "v${version}";
-    sha256 = "1f3w5zyvrkl4gab81i5z9b41ybs54b37znj5r7hrcf25x8hrqgvv";
+    sha256 = "sha256-I2Vg0rpCFH1Z+N+JgtDv2se6lXsggzOsJn3Fj252aTQ=";
   };
 
   # avoid circular dependency with xmlschema which directly depends on this
diff --git a/pkgs/development/python-modules/gruut-ipa/default.nix b/pkgs/development/python-modules/gruut-ipa/default.nix
index ad1da8b2917..f0824670731 100644
--- a/pkgs/development/python-modules/gruut-ipa/default.nix
+++ b/pkgs/development/python-modules/gruut-ipa/default.nix
@@ -1,32 +1,33 @@
 { lib
 , buildPythonPackage
 , fetchFromGitHub
-, pkgs
+, espeak
+, numpy
 , python
 }:
 
 buildPythonPackage rec {
   pname = "gruut-ipa";
-  version = "0.12.0";
+  version = "0.13.0";
   format = "setuptools";
 
   src = fetchFromGitHub {
     owner = "rhasspy";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-6pMdBKbp++/5321rc8A2euOSXZCHzHg+wmaEaMZ0egw=";
+    sha256 = "sha256-Q2UKELoG8OaAPxIrZNCpXgeWZ2fCzb3g3SOVzCm/gg0=";
   };
 
   postPatch = ''
-    patchShebangs bin/speak-ipa
+    patchShebangs bin/*
     substituteInPlace bin/speak-ipa \
       --replace '${"\${src_dir}:"}' "$out/lib/${python.libPrefix}/site-packages:" \
-      --replace "do espeak" "do ${pkgs.espeak}/bin/espeak"
+      --replace "do espeak" "do ${espeak}/bin/espeak"
   '';
 
-  postInstall = ''
-    install -m0755 bin/speak-ipa $out/bin/speak-ipa
-  '';
+  propagatedBuildInputs = [
+    numpy
+  ];
 
   checkPhase = ''
     runHook preCheck
diff --git a/pkgs/development/python-modules/gruut/default.nix b/pkgs/development/python-modules/gruut/default.nix
index f1e490a65cb..5078c6fdb39 100644
--- a/pkgs/development/python-modules/gruut/default.nix
+++ b/pkgs/development/python-modules/gruut/default.nix
@@ -36,14 +36,14 @@ let
 in
 buildPythonPackage rec {
   pname = "gruut";
-  version = "2.2.0";
+  version = "2.2.3";
   format = "setuptools";
 
   src = fetchFromGitHub {
     owner = "rhasspy";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-9vj3x2IjTso8ksN1cqe5frwg0Y3GhOB6bPWvaBSBOf8=";
+    sha256 = "sha256-B5fPUW4YaMzDDXxncfrWwxGdUizuaxnPImNMf1ZZJ/I=";
   };
 
   postPatch = ''
diff --git a/pkgs/development/python-modules/hahomematic/default.nix b/pkgs/development/python-modules/hahomematic/default.nix
index 3707391e8e2..be9be54a79a 100644
--- a/pkgs/development/python-modules/hahomematic/default.nix
+++ b/pkgs/development/python-modules/hahomematic/default.nix
@@ -14,7 +14,7 @@
 
 buildPythonPackage rec {
   pname = "hahomematic";
-  version = "0.37.7";
+  version = "0.38.2";
   format = "setuptools";
 
   disabled = pythonOlder "3.9";
@@ -23,7 +23,7 @@ buildPythonPackage rec {
     owner = "danielperna84";
     repo = pname;
     rev = version;
-    sha256 = "sha256-rVtXfoQ/1GAc6ugT/jduoMacCDjrlGagWhK1rYgl/1Q=";
+    sha256 = "sha256-oyO4+zxyhr2azUdeNfw0WjgN6LFxi3+svJ/B/tUEqjQ=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/invoke/default.nix b/pkgs/development/python-modules/invoke/default.nix
index 45c341c209c..45711a3dc20 100644
--- a/pkgs/development/python-modules/invoke/default.nix
+++ b/pkgs/development/python-modules/invoke/default.nix
@@ -2,15 +2,19 @@
 , bash
 , buildPythonPackage
 , fetchPypi
+, pythonOlder
 }:
 
 buildPythonPackage rec {
   pname = "invoke";
-  version = "1.6.0";
+  version = "1.7.0";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "374d1e2ecf78981da94bfaf95366216aaec27c2d6a7b7d5818d92da55aa258d3";
+    hash = "sha256-4zLkneQEY/IBYxX1HfQjE4VXcr6GQ1aGFWvBj0W1zGw=";
   };
 
   patchPhase = ''
@@ -20,8 +24,14 @@ buildPythonPackage rec {
   # errors with vendored libs
   doCheck = false;
 
-  meta = {
+  pythonImportsCheck = [
+    "invoke"
+  ];
+
+  meta = with lib; {
     description = "Pythonic task execution";
-    license = lib.licenses.bsd2;
+    homepage = "https://www.pyinvoke.org/";
+    license = licenses.bsd2;
+    maintainers = with maintainers; [ ];
   };
 }
diff --git a/pkgs/development/python-modules/limnoria/default.nix b/pkgs/development/python-modules/limnoria/default.nix
index 0467790c297..27550c30dbf 100644
--- a/pkgs/development/python-modules/limnoria/default.nix
+++ b/pkgs/development/python-modules/limnoria/default.nix
@@ -15,14 +15,14 @@
 
 buildPythonPackage rec {
   pname = "limnoria";
-  version = "2022.2.3";
+  version = "2022.3.17";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-Jc11hS+WrRnjgYOUpc+GdkRoNV/DUJhQK6rI2lUkEIA=";
+    hash = "sha256-sSZFbEDlkc+F0PIwvseVEBoQQZVTFypW2nvLmPDD4u0=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/mkdocs-material/default.nix b/pkgs/development/python-modules/mkdocs-material/default.nix
new file mode 100644
index 00000000000..16a81580916
--- /dev/null
+++ b/pkgs/development/python-modules/mkdocs-material/default.nix
@@ -0,0 +1,41 @@
+{ lib, callPackage, buildPythonApplication, fetchFromGitHub
+, jinja2
+, markdown
+, mkdocs
+, mkdocs-material-extensions
+, pygments
+, pymdown-extensions
+}:
+
+buildPythonApplication rec {
+  pname = "mkdocs-material";
+  version = "8.2.5";
+
+  src = fetchFromGitHub {
+    owner = "squidfunk";
+    repo = pname;
+    rev = version;
+    sha256 = "0v30x2cgc5i307p0hsy5h58pfd8w6xpnvimsb75614xlmx3ycaqd";
+  };
+
+  propagatedBuildInputs = [
+    jinja2
+    markdown
+    mkdocs
+    mkdocs-material-extensions
+    pygments
+    pymdown-extensions
+  ];
+
+  # No tests for python
+  doCheck = false;
+
+  pythonImportsCheck = [ "mkdocs" ];
+
+  meta = with lib; {
+    description = "Material for mkdocs";
+    homepage = "https://squidfunk.github.io/mkdocs-material/";
+    license = licenses.mit;
+    maintainers = with maintainers; [ dandellion ];
+  };
+}
diff --git a/pkgs/development/python-modules/mkdocs-material/mkdocs-material-extensions.nix b/pkgs/development/python-modules/mkdocs-material/mkdocs-material-extensions.nix
new file mode 100644
index 00000000000..2d03c8fa8c5
--- /dev/null
+++ b/pkgs/development/python-modules/mkdocs-material/mkdocs-material-extensions.nix
@@ -0,0 +1,24 @@
+{ lib, fetchFromGitHub, buildPythonPackage }:
+
+buildPythonPackage rec {
+  pname = "mkdocs-material-extensions";
+  version = "1.0.3";
+
+  src = fetchFromGitHub {
+    owner = "facelessuser";
+    repo = pname;
+    rev = version;
+    sha256 = "1mvc13lz16apnli2qcqf0dvlm8mshy47jmz2vp72lja6x8jfq2p3";
+  };
+
+  doCheck = false; # Circular dependency
+
+  pythonImportsCheck = [ "materialx" ];
+
+  meta = with lib; {
+    description = "Markdown extension resources for MkDocs Material";
+    homepage = "https://github.com/facelessuser/mkdocs-material-extensions";
+    license = licenses.mit;
+    maintainers = with maintainers; [ dandellion ];
+  };
+}
diff --git a/pkgs/development/python-modules/myfitnesspal/default.nix b/pkgs/development/python-modules/myfitnesspal/default.nix
index 567eeaf6969..10f5713754b 100644
--- a/pkgs/development/python-modules/myfitnesspal/default.nix
+++ b/pkgs/development/python-modules/myfitnesspal/default.nix
@@ -8,11 +8,11 @@
 , measurement
 , python-dateutil
 , requests
-, six
 , rich
 , pytestCheckHook
 , mock
 , nose
+, pythonOlder
 }:
 
 # TODO: Define this package in "all-packages.nix" using "toPythonApplication".
@@ -20,12 +20,14 @@
 
 buildPythonPackage rec {
   pname = "myfitnesspal";
-  version = "1.16.6";
+  version = "1.17.0";
   format = "setuptools";
 
+  disabled = pythonOlder "3.7";
+
   src = fetchPypi {
     inherit pname version;
-    sha256 = "ac07369ede3ca4c6d673e02f2b9e0893b17d079f3085e36fdfdbdd1cba9f37db";
+    sha256 = "sha256-UXFvKQtC44EvscYWXK7KI/do3U0wTWI3zKwvsRdzKrs=";
   };
 
   propagatedBuildInputs = [
@@ -36,7 +38,6 @@ buildPythonPackage rec {
     measurement
     python-dateutil
     requests
-    six
     rich
   ];
 
diff --git a/pkgs/development/python-modules/ocrmypdf/default.nix b/pkgs/development/python-modules/ocrmypdf/default.nix
index ed672b7d01c..a268f34cc71 100644
--- a/pkgs/development/python-modules/ocrmypdf/default.nix
+++ b/pkgs/development/python-modules/ocrmypdf/default.nix
@@ -27,7 +27,7 @@
 
 buildPythonPackage rec {
   pname = "ocrmypdf";
-  version = "13.4.0";
+  version = "13.4.1";
 
   src = fetchFromGitHub {
     owner = "jbarlow83";
@@ -39,7 +39,7 @@ buildPythonPackage rec {
     extraPostFetch = ''
       rm "$out/.git_archival.txt"
     '';
-    sha256 = "sha256-LgHhF+vztXPCn71d87OMn0umLvps7We6vyjdRJZw+3E=";
+    sha256 = "sha256-gxgeEwm3cYNllcmRTZhdyIWWGKXTewyVW314k732swE=";
   };
 
   SETUPTOOLS_SCM_PRETEND_VERSION = version;
diff --git a/pkgs/development/python-modules/pdfminer_six/default.nix b/pkgs/development/python-modules/pdfminer_six/default.nix
index 725fb9ab9f9..6d2b0adcf41 100644
--- a/pkgs/development/python-modules/pdfminer_six/default.nix
+++ b/pkgs/development/python-modules/pdfminer_six/default.nix
@@ -1,8 +1,8 @@
-{ lib, buildPythonPackage, fetchFromGitHub, isPy3k, cryptography, chardet, nose, sortedcontainers }:
+{ lib, buildPythonPackage, fetchFromGitHub, isPy3k, cryptography, chardet, pytestCheckHook }:
 
 buildPythonPackage rec {
   pname = "pdfminer_six";
-  version = "20201018";
+  version = "20220319";
 
   disabled = !isPy3k;
 
@@ -10,10 +10,10 @@ buildPythonPackage rec {
     owner = "pdfminer";
     repo = "pdfminer.six";
     rev = version;
-    sha256 = "1a2fxxnnjqbx344znpvx7cnv1881dk6585ibw01inhfq3w6yj2lr";
+    sha256 = "sha256-sjO7jmHSe4EDmJ1rfiXx+lsHxc+DfKeMet37Nbg03WQ=";
   };
 
-  propagatedBuildInputs = [ chardet cryptography sortedcontainers ];
+  propagatedBuildInputs = [ chardet cryptography ];
 
   postInstall = ''
     for file in $out/bin/*.py; do
@@ -21,10 +21,7 @@ buildPythonPackage rec {
     done
   '';
 
-  checkInputs = [ nose ];
-  checkPhase = ''
-    nosetests
-  '';
+  checkInputs = [ pytestCheckHook ];
 
   meta = with lib; {
     description = "PDF parser and analyzer";
diff --git a/pkgs/development/python-modules/pyaussiebb/default.nix b/pkgs/development/python-modules/pyaussiebb/default.nix
index 0a88f6b0743..d764e93e3f0 100644
--- a/pkgs/development/python-modules/pyaussiebb/default.nix
+++ b/pkgs/development/python-modules/pyaussiebb/default.nix
@@ -11,7 +11,7 @@
 
 buildPythonPackage rec {
   pname = "pyaussiebb";
-  version = "0.0.13";
+  version = "0.0.14";
   format = "pyproject";
 
   disabled = pythonOlder "3.9";
@@ -20,7 +20,7 @@ buildPythonPackage rec {
     owner = "yaleman";
     repo = "aussiebb";
     rev = "v${version}";
-    hash = "sha256-8DX7GwSnpiqUO20a6oLMMTkpTUxWTb8LMD4Uk0lyAOY=";
+    hash = "sha256-Z+xLCKnUnBAH9nm0YR11zx1lyNrIb8BZLFmaZdpnfdw=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/pysaml2/default.nix b/pkgs/development/python-modules/pysaml2/default.nix
index e658ca40be7..1c6d9b45c56 100644
--- a/pkgs/development/python-modules/pysaml2/default.nix
+++ b/pkgs/development/python-modules/pysaml2/default.nix
@@ -22,7 +22,7 @@
 
 buildPythonPackage rec {
   pname = "pysaml2";
-  version = "7.1.1";
+  version = "7.1.2";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
@@ -31,7 +31,7 @@ buildPythonPackage rec {
     owner = "IdentityPython";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-uRfcn3nCK+tx6ol6ZFarOSrDOh0cfC9gZXBZ7EICQzw=";
+    sha256 = "sha256-nyQcQ1OO9PuuQROg+km2vIRF1sZ22MZhiHpmVXWl+is=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/python-crfsuite/default.nix b/pkgs/development/python-modules/python-crfsuite/default.nix
index cd2adc48c6f..a357df95a43 100644
--- a/pkgs/development/python-modules/python-crfsuite/default.nix
+++ b/pkgs/development/python-modules/python-crfsuite/default.nix
@@ -2,6 +2,7 @@
 , buildPythonPackage
 , fetchPypi
 , pytestCheckHook
+, pythonAtLeast
 }:
 
 buildPythonPackage rec {
@@ -32,5 +33,6 @@ buildPythonPackage rec {
     homepage = "https://github.com/scrapinghub/python-crfsuite";
     license = licenses.mit;
     maintainers = teams.tts.members;
+    broken = pythonAtLeast "3.10"; # https://github.com/scrapinghub/python-crfsuite/issues/130
   };
 }
diff --git a/pkgs/development/python-modules/pyvex/default.nix b/pkgs/development/python-modules/pyvex/default.nix
index 595b4c35737..e3a810be916 100644
--- a/pkgs/development/python-modules/pyvex/default.nix
+++ b/pkgs/development/python-modules/pyvex/default.nix
@@ -12,14 +12,14 @@
 
 buildPythonPackage rec {
   pname = "pyvex";
-  version = "9.1.11752";
+  version = "9.1.12332";
   format = "setuptools";
 
   disabled = pythonOlder "3.6";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-DI+Jc5MtDd2XXfjIDtPd8qt4/eQ/3nwbDUqWE2haUhM=";
+    hash = "sha256-e1lruHgppQ8mJbTx6xsUDSkLCYQISqM9c1vsjdQU4eI=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/qiling/default.nix b/pkgs/development/python-modules/qiling/default.nix
index 51c762c8107..72995432814 100644
--- a/pkgs/development/python-modules/qiling/default.nix
+++ b/pkgs/development/python-modules/qiling/default.nix
@@ -8,18 +8,22 @@
 , multiprocess
 , pefile
 , pyelftools
+, pythonOlder
 , python-registry
+, pyyaml
 , unicorn
 }:
 
 buildPythonPackage rec {
   pname = "qiling";
-  version = "1.4.1";
+  version = "1.4.2";
   format = "setuptools";
 
+  disabled = pythonOlder "3.7";
+
   src = fetchPypi {
     inherit pname version;
-    sha256 = "e72dc5856cbda975f962ddf063063a32bd6c3b825f75e0795e94ba6840a7d45f";
+    hash = "sha256-myUGzNP4bf90d2gY5ZlYbVlTG640dj/Qha8/aMydvuw=";
   };
 
   propagatedBuildInputs = [
@@ -30,14 +34,10 @@ buildPythonPackage rec {
     pefile
     pyelftools
     python-registry
+    pyyaml
     unicorn
   ];
 
-  postPatch = ''
-    substituteInPlace setup.py \
-      --replace "pefile==2021.5.24" "pefile>=2021.5.24"
-  '';
-
   # Tests are broken (attempt to import a file that tells you not to import it,
   # amongst other things)
   doCheck = false;
diff --git a/pkgs/development/python-modules/ropper/default.nix b/pkgs/development/python-modules/ropper/default.nix
index 1237fd09ea4..391e8d36139 100644
--- a/pkgs/development/python-modules/ropper/default.nix
+++ b/pkgs/development/python-modules/ropper/default.nix
@@ -1,12 +1,12 @@
 { lib
-, buildPythonApplication
+, buildPythonPackage
 , fetchPypi
 , capstone
 , filebytes
 , pytest
 }:
 
-buildPythonApplication rec {
+buildPythonPackage rec {
   pname = "ropper";
   version = "1.13.6";
 
diff --git a/pkgs/development/python-modules/scikit-hep-testdata/default.nix b/pkgs/development/python-modules/scikit-hep-testdata/default.nix
index d8240fd8333..803daeb2ab3 100644
--- a/pkgs/development/python-modules/scikit-hep-testdata/default.nix
+++ b/pkgs/development/python-modules/scikit-hep-testdata/default.nix
@@ -10,7 +10,7 @@
 
 buildPythonPackage rec {
   pname = "scikit-hep-testdata";
-  version = "0.4.11";
+  version = "0.4.12";
   format = "pyproject";
 
   # fetch from github as we want the data files
@@ -19,7 +19,7 @@ buildPythonPackage rec {
     owner = "scikit-hep";
     repo = pname;
     rev = "v${version}";
-    sha256 = "18r5nk8d5y79ihzjkjm5l0hiw2sjgj87px7vwb0bxbs73f5v353b";
+    sha256 = "sha256-ZnsOmsajW4dDv53I/Cuu97mPJywGiwFhNGpT1WRfxSw=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/scrapy/default.nix b/pkgs/development/python-modules/scrapy/default.nix
index c26ec74ac31..073059aec62 100644
--- a/pkgs/development/python-modules/scrapy/default.nix
+++ b/pkgs/development/python-modules/scrapy/default.nix
@@ -22,6 +22,7 @@
 , service-identity
 , sybil
 , testfixtures
+, tldextract
 , twisted
 , w3lib
 , zope_interface
@@ -29,13 +30,13 @@
 
 buildPythonPackage rec {
   pname = "scrapy";
-  version = "2.5.1";
+  version = "2.6.1";
   disabled = pythonOlder "3.6";
 
   src = fetchPypi {
     inherit version;
     pname = "Scrapy";
-    sha256 = "13af6032476ab4256158220e530411290b3b934dd602bb6dacacbf6d16141f49";
+    sha256 = "56fd55a59d0f329ce752892358abee5a6b50b4fc55a40420ea317dc617553827";
   };
 
   nativeBuildInputs = [
@@ -54,6 +55,7 @@ buildPythonPackage rec {
     pyopenssl
     queuelib
     service-identity
+    tldextract
     twisted
     w3lib
     zope_interface
@@ -68,22 +70,6 @@ buildPythonPackage rec {
     testfixtures
   ];
 
-  patches = [
-    # Require setuptools, https://github.com/scrapy/scrapy/pull/5122
-    (fetchpatch {
-      name = "add-setuptools.patch";
-      url = "https://github.com/scrapy/scrapy/commit/4f500342c8ad4674b191e1fab0d1b2ac944d7d3e.patch";
-      sha256 = "14030sfv1cf7dy4yww02b49mg39cfcg4bv7ys1iwycfqag3xcjda";
-    })
-    # Make Twisted[http2] installation optional, https://github.com/scrapy/scrapy/pull/5113
-    (fetchpatch {
-      name = "remove-h2.patch";
-      url = "https://github.com/scrapy/scrapy/commit/c5b1ee810167266fcd259f263dbfc0fe0204761a.patch";
-      sha256 = "0sa39yx9my4nqww8a12bk9zagx7b56vwy7xpxm4xgjapjl6mcc0k";
-      excludes = [ "tox.ini" ];
-    })
-  ];
-
   LC_ALL = "en_US.UTF-8";
 
   preCheck = ''
diff --git a/pkgs/development/python-modules/typed-settings/default.nix b/pkgs/development/python-modules/typed-settings/default.nix
index ea5092cb2ae..6e903b68407 100644
--- a/pkgs/development/python-modules/typed-settings/default.nix
+++ b/pkgs/development/python-modules/typed-settings/default.nix
@@ -12,13 +12,13 @@
 
 buildPythonPackage rec {
   pname = "typed-settings";
-  version = "0.11.1";
+  version = "1.0.0";
   format = "pyproject";
   disabled = pythonOlder "3.7";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-gcyOeUyRAwU5s+XoQO/yM0tx7QHjDsBeyoe5HRZHtIs=";
+    sha256 = "sha256-c+iOb1F8+9IoRbwpMTdyDfOPW2ZEo4xDAlbzLAxgSfk=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/python-modules/types-pytz/default.nix b/pkgs/development/python-modules/types-pytz/default.nix
index 1fc7dd5f9b6..eecf45c82fa 100644
--- a/pkgs/development/python-modules/types-pytz/default.nix
+++ b/pkgs/development/python-modules/types-pytz/default.nix
@@ -5,12 +5,12 @@
 
 buildPythonPackage rec {
   pname = "types-pytz";
-  version = "2021.3.5";
+  version = "2021.3.6";
   format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-/vjeI47pUTWVIimiojv7h71j1abIWYEGpGz89I8Gnqg=";
+    sha256 = "sha256-dFR/2Q2NirTx7t86NEp9GG2XSGlziV+BIhpxLh4s2ZM=";
   };
 
   # Modules doesn't have tests
diff --git a/pkgs/development/python-modules/types-requests/default.nix b/pkgs/development/python-modules/types-requests/default.nix
index d28f01b128b..d6d7f752a7a 100644
--- a/pkgs/development/python-modules/types-requests/default.nix
+++ b/pkgs/development/python-modules/types-requests/default.nix
@@ -6,12 +6,12 @@
 
 buildPythonPackage rec {
   pname = "types-requests";
-  version = "2.27.13";
+  version = "2.27.14";
   format = "setuptools";
 
   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-zwZGAx3WMHETs3gU90PATwcHozVzeMK7Eyb4SEEvW6k=";
+    sha256 = "sha256-BFee4WT3wmWb5GlQ48L41RoIGtJS7xsB1LEvq6XDgQs=";
   };
 
   propagatedBuildInputs = [
diff --git a/pkgs/development/python-modules/types-tabulate/default.nix b/pkgs/development/python-modules/types-tabulate/default.nix
index 9e5c9b628b2..6d7ceade85c 100644
--- a/pkgs/development/python-modules/types-tabulate/default.nix
+++ b/pkgs/development/python-modules/types-tabulate/default.nix
@@ -5,11 +5,11 @@
 
 buildPythonPackage rec {
   pname = "types-tabulate";
-  version = "0.8.5";
+  version = "0.8.6";
 
   src = fetchPypi {
     inherit pname version;
-    hash = "sha256-A/KDvzhOoSG3tqWK+zj03vl/MHBPyhOg2mhpNrDzkqw=";
+    hash = "sha256-P037eVRJwheO1cIU7FEUwESx7t1xrQoQA7xnDwnYcQo=";
   };
 
   # Module doesn't have tests
diff --git a/pkgs/development/python-modules/wrf-python/default.nix b/pkgs/development/python-modules/wrf-python/default.nix
index 94234b2626e..14e81df65fc 100644
--- a/pkgs/development/python-modules/wrf-python/default.nix
+++ b/pkgs/development/python-modules/wrf-python/default.nix
@@ -1,14 +1,27 @@
-{lib, fetchFromGitHub, pythonOlder, buildPythonPackage, gfortran, mock, xarray, wrapt, numpy, netcdf4, setuptools}:
+{lib
+, fetchFromGitHub
+, pythonOlder
+, buildPythonPackage
+, gfortran
+, xarray
+, wrapt
+, numpy
+, netcdf4
+, setuptools
+}:
 
 buildPythonPackage rec {
   pname = "wrf-python";
-  version = "1.3.2.6";
+  version = "1.3.3";
+  format = "setuptools";
+
+  disabled = pythonOlder "3.7";
 
   src = fetchFromGitHub {
     owner = "NCAR";
     repo = "wrf-python";
     rev = version;
-    sha256 = "046kflai71r7xrmdw6jn0ifn5656wj9gpnwlgxkx430dgk7zbc2y";
+    hash = "sha256-+v4FEK0FVE0oAIb18XDTOInHKfxXyykb1ngk9Uxwf0c=";
   };
 
   propagatedBuildInputs = [
@@ -24,9 +37,8 @@ buildPythonPackage rec {
 
   checkInputs = [
     netcdf4
-  ] ++ lib.optional (pythonOlder "3.3") mock;
+  ];
 
-  doCheck = true;
   checkPhase = ''
     runHook preCheck
     cd ./test/ci_tests
@@ -34,10 +46,14 @@ buildPythonPackage rec {
     runHook postCheck
   '';
 
-  meta = {
+  pythonImportsCheck = [
+    "wrf"
+  ];
+
+  meta = with lib; {
     description = "WRF postprocessing library for Python";
     homepage = "http://wrf-python.rtfd.org";
-    license = lib.licenses.asl20;
-    maintainers = with lib.maintainers; [ mhaselsteiner ];
+    license = licenses.asl20;
+    maintainers = with maintainers; [ mhaselsteiner ];
   };
 }
diff --git a/pkgs/development/quickemu/default.nix b/pkgs/development/quickemu/default.nix
index aa9906c77ec..7aa5b1602c3 100644
--- a/pkgs/development/quickemu/default.nix
+++ b/pkgs/development/quickemu/default.nix
@@ -43,17 +43,16 @@ in
 
 stdenv.mkDerivation rec {
   pname = "quickemu";
-  version = "3.11";
+  version = "3.14";
 
   src = fetchFromGitHub {
     owner = "quickemu-project";
     repo = "quickemu";
     rev = version;
-    sha256 = "1xwf9vwbr57wmyxfcqzl1jnmfx3ffh7sfqf0zcdq41wqkm8s106n";
+    sha256="sha256-7zaXazGzb36Nwk/meJ3lGD+l+fylWZYnhttDL1CXN9s=";
   };
 
   patches = [
-    ./efi_vars_ensure_writable.patch
     ./input_overrides.patch
   ];
 
diff --git a/pkgs/development/quickemu/efi_vars_ensure_writable.patch b/pkgs/development/quickemu/efi_vars_ensure_writable.patch
deleted file mode 100644
index e146d0ba768..00000000000
--- a/pkgs/development/quickemu/efi_vars_ensure_writable.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/quickemu b/quickemu
-index a9a60a5..1a932ac 100755
---- a/quickemu
-+++ b/quickemu
-@@ -197,7 +197,7 @@ function efi_vars() {
- 
-   if [ ! -e "${VARS_OUT}" ]; then
-     if [ -e "${VARS_IN}" ]; then
--      cp "${VARS_IN}" "${VARS_OUT}"
-+      cp "${VARS_IN}" "${VARS_OUT}" && chmod +w "${VARS_OUT}"
-     else
-       echo "ERROR! ${VARS_IN} was not found. Please install edk2."
-       exit 1
diff --git a/pkgs/development/quickemu/input_overrides.patch b/pkgs/development/quickemu/input_overrides.patch
index 228624edcf1..42e4d2e1829 100644
--- a/pkgs/development/quickemu/input_overrides.patch
+++ b/pkgs/development/quickemu/input_overrides.patch
@@ -1,20 +1,29 @@
 diff --git a/quickemu b/quickemu
-index 1a932ac..ab2f752 100755
+index 24e1007..39cd5e4 100755
 --- a/quickemu
 +++ b/quickemu
+@@ -196,7 +196,7 @@ function efi_vars() {
+ 
+   if [ ! -e "${VARS_OUT}" ]; then
+     if [ -e "${VARS_IN}" ]; then
+-      cp "${VARS_IN}" "${VARS_OUT}"
++      cp "${VARS_IN}" "${VARS_OUT}" && chmod +w "${VARS_OUT}"
+     else
+       echo "ERROR! ${VARS_IN} was not found. Please install edk2."
+       exit 1
 @@ -383,7 +383,10 @@ function vm_boot() {
      # https://bugzilla.redhat.com/show_bug.cgi?id=1929357#c5
      case ${secureboot} in
        on)
--        if [ -e "/usr/share/OVMF/OVMF_CODE_4M.secboot.fd" ]; then
+-         if [ -e "/usr/share/OVMF/OVMF_CODE_4M.secboot.fd" ]; then
 +        if [[ ${ENV_EFI_CODE_SECURE} && ${ENV_EFI_CODE_SECURE-x} ]] && [[ ${ENV_EFI_VARS_SECURE} && ${ENV_EFI_VARS_SECURE-x} ]]; then
-+          EFI_CODE="${ENV_EFI_CODE_SECURE}"
-+          efi_vars "${ENV_EFI_VARS_SECURE}" "${EFI_VARS}"
++           EFI_CODE="${ENV_EFI_CODE_SECURE}"
++           efi_vars "${ENV_EFI_VARS_SECURE}" "${EFI_VARS}"
 +        elif [ -e "/usr/share/OVMF/OVMF_CODE_4M.secboot.fd" ]; then
            EFI_CODE="/usr/share/OVMF/OVMF_CODE_4M.secboot.fd"
            efi_vars "/usr/share/OVMF/OVMF_VARS_4M.fd" "${EFI_VARS}"
          elif [ -e "/usr/share/edk2/ovmf/OVMF_CODE.secboot.fd" ]; then
-@@ -402,7 +405,10 @@ function vm_boot() {
+@@ -408,7 +411,10 @@ function vm_boot() {
          fi
          ;;
        *)
diff --git a/pkgs/development/tools/allure/default.nix b/pkgs/development/tools/allure/default.nix
new file mode 100644
index 00000000000..2988a559aa7
--- /dev/null
+++ b/pkgs/development/tools/allure/default.nix
@@ -0,0 +1,39 @@
+{ lib, stdenv, makeWrapper, fetchurl, jre }:
+
+let
+  pname = "allure";
+  version = "2.17.3";
+in
+stdenv.mkDerivation rec {
+  inherit pname version;
+  nativeBuildInputs = [ makeWrapper ];
+
+  buildInputs = [ jre ];
+
+  src = fetchurl {
+    url = "https://github.com/allure-framework/allure2/releases/download/${version}/allure-${version}.tgz";
+    sha256 = "sha256-WGeCzWwyLEb4WmlA6Vs8L2TL3NTL6sky5TLeiwV8iJY=";
+  };
+  dontConfigure = true;
+  dontBuild = true;
+
+  installPhase = ''
+    mkdir -p "$out/share"
+    cd "$out/share"
+    tar xvzf $src
+    mkdir -p "$out/bin"
+    makeWrapper $out/share/${pname}-${version}/bin/allure $out/bin/${pname} \
+      --prefix PATH : "${jre}/bin"
+  '';
+
+  dontCheck = true;
+
+  meta = with lib; {
+    homepage = "https://docs.qameta.io/allure/";
+    description = "Allure Report is a flexible, lightweight multi-language test reporting tool.";
+    longDescription = "Allure Report is a flexible, lightweight multi-language test reporting tool. It provides clear graphical reports and allows everyone involved in the development process to extract the maximum of information from the everyday testing process";
+    license = licenses.asl20;
+    maintainers = with maintainers; [ happysalada ];
+  };
+}
+
diff --git a/pkgs/development/tools/analysis/flow/default.nix b/pkgs/development/tools/analysis/flow/default.nix
index 123141d9f45..2d97b64b11e 100644
--- a/pkgs/development/tools/analysis/flow/default.nix
+++ b/pkgs/development/tools/analysis/flow/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "flow";
-  version = "0.174.0";
+  version = "0.174.1";
 
   src = fetchFromGitHub {
     owner = "facebook";
     repo = "flow";
     rev = "v${version}";
-    sha256 = "sha256-s4wu7UW3OKz2xQpNpZZeDjyDdNbRw9w0kauiPWo/SMA=";
+    sha256 = "sha256-lfj6KyB9QYvUy4Ybo8f30omAg4K/jT5MEERJPm0aJ7U=";
   };
 
   installPhase = ''
diff --git a/pkgs/development/tools/cask/default.nix b/pkgs/development/tools/cask/default.nix
index f9e7d215f32..2eadc6b877f 100644
--- a/pkgs/development/tools/cask/default.nix
+++ b/pkgs/development/tools/cask/default.nix
@@ -20,20 +20,22 @@ stdenv.mkDerivation rec {
 
   buildPhase = ''
     runHook preBuild
+
     emacs --batch -L . -f batch-byte-compile cask.el cask-cli.el
+
     runHook postBuild
   '';
 
   installPhase = ''
     runHook preInstall
+
     mkdir -p $out/bin
-    mkdir -p $out/templates
-    mkdir -p $out/share/emacs/site-lisp/cask/bin
-    install -Dm644 *.el *.elc $out/share/emacs/site-lisp/cask
-    install -Dm755 bin/cask $out/share/emacs/site-lisp/cask/bin
-    install -Dm644 templates/* $out/templates/
+    dir=$out/share/emacs/site-lisp/cask
+    install -Dm444 -t $dir     *.el *.elc
+    install -Dm555 -t $dir/bin bin/cask
     touch $out/.no-upgrade
-    ln -s $out/share/emacs/site-lisp/cask/bin/cask $out/bin/cask
+    ln -s $dir/bin/cask $out/bin/cask
+
     runHook postInstall
   '';
 
@@ -48,7 +50,7 @@ stdenv.mkDerivation rec {
 
     homepage = "https://cask.readthedocs.io/en/latest/index.html";
     license = licenses.gpl3Plus;
+    maintainers = with maintainers; [ flexw ];
     platforms = platforms.all;
-    maintainers = [ maintainers.flexw ];
   };
 }
diff --git a/pkgs/development/tools/circup/default.nix b/pkgs/development/tools/circup/default.nix
index b620fa20e46..71ac08d2676 100644
--- a/pkgs/development/tools/circup/default.nix
+++ b/pkgs/development/tools/circup/default.nix
@@ -40,7 +40,7 @@ python3.pkgs.buildPythonApplication rec {
   '';
 
   pythonImportsCheck = [
-    " circup "
+    "circup"
   ];
 
   meta = with lib; {
diff --git a/pkgs/development/tools/cloud-nuke/default.nix b/pkgs/development/tools/cloud-nuke/default.nix
index 3d7c03e3e1c..719e13a5e12 100644
--- a/pkgs/development/tools/cloud-nuke/default.nix
+++ b/pkgs/development/tools/cloud-nuke/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "cloud-nuke";
-  version = "0.11.1";
+  version = "0.11.3";
 
   src = fetchFromGitHub {
     owner = "gruntwork-io";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-BYrJ0I8ZARppy+7CJPfAm/4SIEupGZh1qd1ghZWOD/8=";
+    sha256 = "sha256-iMPTRPsULrwXbx1xQ9db1s0p9a6YejXkwp7sqZ1ayYU=";
   };
 
   vendorSha256 = "sha256-McCbogZvgm9pnVjay9O2CxAh+653JnDMcU4CHD0PTPI=";
diff --git a/pkgs/development/tools/continuous-integration/github-runner/default.nix b/pkgs/development/tools/continuous-integration/github-runner/default.nix
index 222ffb0688f..c8513d004af 100644
--- a/pkgs/development/tools/continuous-integration/github-runner/default.nix
+++ b/pkgs/development/tools/continuous-integration/github-runner/default.nix
@@ -43,13 +43,13 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "github-runner";
-  version = "2.288.1";
+  version = "2.289.1";
 
   src = fetchFromGitHub {
     owner = "actions";
     repo = "runner";
     rev = "v${version}";
-    hash = "sha256-bP+6aAKnu6PxN9eppFXsqOSVSGQ6Lv+gEF2MdEz52WE=";
+    hash = "sha256-5TS/tW1hnDvPZQdR659rw+spLq98niyUms3BrixaKRE=";
   };
 
   nativeBuildInputs = [
diff --git a/pkgs/development/tools/continuous-integration/github-runner/deps.nix b/pkgs/development/tools/continuous-integration/github-runner/deps.nix
index 5b9c1851d9a..0db94420732 100644
--- a/pkgs/development/tools/continuous-integration/github-runner/deps.nix
+++ b/pkgs/development/tools/continuous-integration/github-runner/deps.nix
@@ -3,17 +3,14 @@
   (fetchNuGet { pname = "Microsoft.AspNet.WebApi.Client"; version = "5.2.4"; sha256 = "00fkczf69z2rwarcd8kjjdp47517a0ca6lggn72qbilsp03a5scj"; })
   (fetchNuGet { pname = "Microsoft.AspNetCore.App.Runtime.linux-arm64"; version = "6.0.3"; sha256 = "1jpw4s862j4aa7b7wchi03gxcy02j6hhpbsfbcayiyx6ry788i15"; })
   (fetchNuGet { pname = "Microsoft.AspNetCore.App.Runtime.linux-x64"; version = "6.0.3"; sha256 = "0rrrfgkr7rzhlnsnajvzb1ijkybp99d992bqxy9pbawmq7d60bdk"; })
-  (fetchNuGet { pname = "Microsoft.AspNetCore.App.Runtime.osx-x64"; version = "6.0.3"; sha256 = "09whyl3i9mzy10n5zxlq66lj3l4p29hm75igmdip2fb376zxyam3"; })
   (fetchNuGet { pname = "Microsoft.CodeCoverage"; version = "17.0.0"; sha256 = "18gdbsqf6i79ld4ikqr4jhx9ndsggm865b5xj1xmnmgg12ydp19a"; })
   (fetchNuGet { pname = "Microsoft.CSharp"; version = "4.0.1"; sha256 = "0zxc0apx1gcx361jlq8smc9pfdgmyjh6hpka8dypc9w23nlsh6yj"; })
   (fetchNuGet { pname = "Microsoft.IdentityModel.Logging"; version = "5.2.1"; sha256 = "1gpka9jm2gl6f07pcwzwvaxw9xq1a19i9fskn0qs921c5grhlp3g"; })
   (fetchNuGet { pname = "Microsoft.IdentityModel.Tokens"; version = "5.2.1"; sha256 = "03v6145vr1winq8xxfikydicds4f10qmy1ybyz2gfimnzzx51w00"; })
   (fetchNuGet { pname = "Microsoft.NET.Test.Sdk"; version = "17.0.0"; sha256 = "0bknyf5kig5icwjxls7pcn51x2b2qf91dz9qv67fl70v6cczaz2r"; })
   (fetchNuGet { pname = "Microsoft.NETCore.App.Host.linux-arm64"; version = "6.0.3"; sha256 = "1swbrmpsayy99ycwaq68dx9ydd5h3qv9brwig6ryff1xfn1llndq"; })
-  (fetchNuGet { pname = "Microsoft.NETCore.App.Host.linux-x64"; version = "6.0.3"; sha256 = "1py3nrfvllqlnb9mhs0qwgy7c14n33b2hfb0qc49rx22sqv8ylbp"; })
   (fetchNuGet { pname = "Microsoft.NETCore.App.Runtime.linux-arm64"; version = "6.0.3"; sha256 = "0gjj6p2nnxzhyrmmmwiyrll782famhll9lbgj8cji1i93amxq1pb"; })
   (fetchNuGet { pname = "Microsoft.NETCore.App.Runtime.linux-x64"; version = "6.0.3"; sha256 = "0f04srx6q0jk81a60n956hz32fdngzp0xmdb2x7gyl77gsq8yijj"; })
-  (fetchNuGet { pname = "Microsoft.NETCore.App.Runtime.osx-x64"; version = "6.0.3"; sha256 = "0180ipzzz9pc6f6l17rg5bxz1ghzbapmiqq66kdl33bmbny6vmm9"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.0.1"; sha256 = "01al6cfxp68dscl15z7rxfw9zvhm64dncsw09a1vmdkacsa2v6lr"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.0.1-rc2-24027"; sha256 = "1a0w5fv8slfr4q7m3mh78lb9awdwyz4zv3bb73vybkyq1f6z7lx8"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.1.0"; sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm"; })
diff --git a/pkgs/development/tools/doctl/default.nix b/pkgs/development/tools/doctl/default.nix
index 0e88e1c4926..d7667538e8e 100644
--- a/pkgs/development/tools/doctl/default.nix
+++ b/pkgs/development/tools/doctl/default.nix
@@ -2,7 +2,7 @@
 
 buildGoModule rec {
   pname = "doctl";
-  version = "1.71.0";
+  version = "1.71.1";
 
   vendorSha256 = null;
 
@@ -31,7 +31,7 @@ buildGoModule rec {
     owner = "digitalocean";
     repo = "doctl";
     rev = "v${version}";
-    sha256 = "sha256-cj2+DmJyLa6kFkH9JflaR3yFFXBaVZHO6czJGLEH7L0=";
+    sha256 = "sha256-Y6YabrpM1WcNGp5ksvq3SBuAS6KEUVzEfxsPmBDS+Io=";
   };
 
   meta = with lib; {
diff --git a/pkgs/development/tools/ko/default.nix b/pkgs/development/tools/ko/default.nix
index 3f0f6f6a4cf..ca187dea93d 100644
--- a/pkgs/development/tools/ko/default.nix
+++ b/pkgs/development/tools/ko/default.nix
@@ -7,13 +7,13 @@
 
 buildGoModule rec {
   pname = "ko";
-  version = "0.11.0";
+  version = "0.11.1";
 
   src = fetchFromGitHub {
     owner = "google";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-MT/68znsRPwRnT7150kzE74MunSzyMBftTA50b9ZS3M=";
+    sha256 = "sha256-VtPry8sF+W46gc2lI3uiE4wqilo1WhH+940QKPZ5cyI=";
   };
   vendorSha256 = null;
 
@@ -40,8 +40,9 @@ buildGoModule rec {
 
   postInstall = ''
     installShellCompletion --cmd ko \
-      --bash <($out/bin/ko completion) \
-      --zsh <($out/bin/ko completion --zsh)
+      --bash <($out/bin/ko completion bash) \
+      --fish <($out/bin/ko completion fish) \
+      --zsh <($out/bin/ko completion zsh)
   '';
 
   meta = with lib; {
diff --git a/pkgs/development/tools/ktlint/default.nix b/pkgs/development/tools/ktlint/default.nix
index 73fdb3dff8f..08bfa727758 100644
--- a/pkgs/development/tools/ktlint/default.nix
+++ b/pkgs/development/tools/ktlint/default.nix
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
   pname = "ktlint";
-  version = "0.44.0";
+  version = "0.45.0";
 
   src = fetchurl {
     url = "https://github.com/pinterest/ktlint/releases/download/${version}/ktlint";
-    sha256 = "1l2pmvqw8rjl2xmsdp31j8015clshab6p81i3i05h40rjjz57mvr";
+    sha256 = "sha256-M6M1hYA13QTcX4btcni+GB1NEJYEEeuITIpmY2qS9CM=";
   };
 
   nativeBuildInputs = [ makeWrapper ];
diff --git a/pkgs/development/tools/misc/terraform-ls/default.nix b/pkgs/development/tools/misc/terraform-ls/default.nix
index de928d35fa0..e85e014368a 100644
--- a/pkgs/development/tools/misc/terraform-ls/default.nix
+++ b/pkgs/development/tools/misc/terraform-ls/default.nix
@@ -2,15 +2,15 @@
 
 buildGoModule rec {
   pname = "terraform-ls";
-  version = "0.25.2";
+  version = "0.26.0";
 
   src = fetchFromGitHub {
     owner = "hashicorp";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-dFmXROqh1HYPthzmOE07oyZglaQyx7JqxqYDlLpysrM=";
+    sha256 = "sha256-Xq9HojFjUrdThXvQ4M8o4LLmxopVErnN3WGUgI79BCw=";
   };
-  vendorSha256 = "sha256-5g/s8WDzxobORFY12YdDQ6rDmr9gQzViv1FFrVwrVIE=";
+  vendorSha256 = "sha256-iSgK+FOD9olVN4bR2jmtWndaRHrh9pfo/42COTiIh9c=";
 
   ldflags = [ "-s" "-w" "-X main.version=v${version}" "-X main.prerelease=" ];
 
diff --git a/pkgs/development/tools/misc/terraformer/default.nix b/pkgs/development/tools/misc/terraformer/default.nix
index d4609db2d34..ccd7c703366 100644
--- a/pkgs/development/tools/misc/terraformer/default.nix
+++ b/pkgs/development/tools/misc/terraformer/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "terraformer";
-  version = "0.8.18";
+  version = "0.8.19";
 
   src = fetchFromGitHub {
     owner = "GoogleCloudPlatform";
     repo = pname;
     rev = version;
-    sha256 = "sha256-F+OmeXCb0Q2Qqu0T+dqdxvUYszg4ED/zoayH9iO6PCM=";
+    sha256 = "sha256-h6hqgAHyMNnE7AXNPTUM2srgHW9WGcGmO8d30U2IbSo=";
   };
 
-  vendorSha256 = "sha256-fCovSA3ZbKn9DrDlb/SXoe8iQTTEAiiDZFSpHUPmxzo=";
+  vendorSha256 = "sha256-bT6+fH0VJfcgehiiLIDTEYyWgVHIMUGuRaebzm2st60=";
 
   subPackages = [ "." ];
 
diff --git a/pkgs/development/tools/pulumictl/default.nix b/pkgs/development/tools/pulumictl/default.nix
index b9538235c46..baa9734d340 100644
--- a/pkgs/development/tools/pulumictl/default.nix
+++ b/pkgs/development/tools/pulumictl/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "pulumictl";
-  version = "0.0.29";
+  version = "0.0.30";
 
   src = fetchFromGitHub {
     owner = "pulumi";
     repo = "pulumictl";
     rev = "v${version}";
-    sha256 = "sha256-2jTxtgEg+x/NY/LTWT5+9K9bilOw2bLTUIctjd+qwLE=";
+    sha256 = "sha256-xAlc6dYD73JZqV0QDhvqPmtGF99mqhvdBbDhWlY/4PI=";
   };
 
   vendorSha256 = "sha256-xalfnLc6bPBvm2B42+FzpgrOH541HMWmNHChveI792s=";
diff --git a/pkgs/development/tools/rover/default.nix b/pkgs/development/tools/rover/default.nix
new file mode 100644
index 00000000000..7232089ee99
--- /dev/null
+++ b/pkgs/development/tools/rover/default.nix
@@ -0,0 +1,56 @@
+{ lib
+, callPackage
+, fetchFromGitHub
+, perl
+, rustPlatform
+, librusty_v8 ? callPackage ./librusty_v8.nix { }
+}:
+
+rustPlatform.buildRustPackage rec {
+  pname = "rover";
+  version = "0.4.8";
+
+  src = fetchFromGitHub {
+    owner = "apollographql";
+    repo = pname;
+    rev = "v${version}";
+    sha256 = "sha256-9o2bGa9vxN7EprKgsy9TI7AFmwjo1OT1pDyiLierTq0=";
+  };
+
+  cargoSha256 = "sha256-4oNuyZ1xNK2jP9QFEcthCjEQRyvFykd5N0j5KCXrzVY=";
+
+  # The v8 package will try to download a `librusty_v8.a` release at build time
+  # to our read-only filesystem. To avoid this we pre-download the file and
+  # export it via RUSTY_V8_ARCHIVE
+  RUSTY_V8_ARCHIVE = librusty_v8;
+
+  nativeBuildInputs = [
+    perl
+  ];
+
+  # The rover-client's build script (crates/rover-client/build.rs) will try to
+  # download the API's graphql schema at build time to our read-only filesystem.
+  # To avoid this we pre-download it to a location the build script checks.
+  preBuild = ''
+    mkdir crates/rover-client/.schema
+    cp ${./schema}/etag.id        crates/rover-client/.schema/
+    cp ${./schema}/schema.graphql crates/rover-client/.schema/
+  '';
+
+  passthru.updateScript = ./update.sh;
+
+  # Some tests try to write configuration data to a location in the user's home
+  # directory. Since this would be /homeless-shelter during the build, point at
+  # a writeable location instead.
+  preCheck = ''
+    export APOLLO_CONFIG_HOME="$PWD"
+  '';
+
+  meta = with lib; {
+    description = "A CLI for managing and maintaining graphs with Apollo Studio";
+    homepage = "https://www.apollographql.com/docs/rover";
+    license = licenses.mit;
+    maintainers = [ maintainers.ivanbrennan ];
+    platforms = ["x86_64-linux"];
+  };
+}
diff --git a/pkgs/development/tools/rover/librusty_v8.nix b/pkgs/development/tools/rover/librusty_v8.nix
new file mode 100644
index 00000000000..3dcb1f95acd
--- /dev/null
+++ b/pkgs/development/tools/rover/librusty_v8.nix
@@ -0,0 +1,17 @@
+{ rust, stdenv, fetchurl }:
+
+let
+  arch = rust.toRustTarget stdenv.hostPlatform;
+  fetch_librusty_v8 = args: fetchurl {
+    name = "librusty_v8-${args.version}";
+    url = "https://github.com/denoland/rusty_v8/releases/download/v${args.version}/librusty_v8_release_${arch}.a";
+    sha256 = args.shas.${stdenv.hostPlatform.system};
+    meta = { inherit (args) version; };
+  };
+in
+fetch_librusty_v8 {
+  version = "0.38.1";
+  shas = {
+    x86_64-linux = "sha256-vRkb5ZrIOYSKa84UbsJD+Oua0wve7f1Yf3kMg/kkYSY=";
+  };
+}
diff --git a/pkgs/development/tools/rover/schema/etag.id b/pkgs/development/tools/rover/schema/etag.id
new file mode 100644
index 00000000000..369872c3115
--- /dev/null
+++ b/pkgs/development/tools/rover/schema/etag.id
@@ -0,0 +1 @@
+1bdcf8916afc3cea660add457724dddd70154904f4e360e15da27fa7d5c43cd0
diff --git a/pkgs/development/tools/rover/schema/schema.graphql b/pkgs/development/tools/rover/schema/schema.graphql
new file mode 100644
index 00000000000..58198f7e181
--- /dev/null
+++ b/pkgs/development/tools/rover/schema/schema.graphql
@@ -0,0 +1,172 @@
+"""An organization. Can have multiple members and graphs.""" type Account{auditLogExports:[AuditLogExport!]"""These are the roles that the account is able to use""" availableRoles:[UserPermission!]!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request
+with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the
+browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the
+filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to
+MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the account's avatar. Note that CORS is not enabled for these URLs. The size
+argument is used for bandwidth reduction, and should be the size of the image as displayed in the
+application. Apollo's media server will downscale larger images to at least the requested size,
+but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String billingInfo:BillingInfo companyUrl:String currentBillingMonth:BillingMonth currentPlan:BillingPlan!currentPlanV2:BillingPlanV2!currentSubscription:BillingSubscription currentSubscriptionV2:BillingSubscriptionV2 experimentalFeatures:AccountExperimentalFeatures!expiredTrialSubscription:BillingSubscription expiredTrialSubscriptionV2:BillingSubscriptionV2 graphIDAvailable(id:ID!):Boolean!hasBeenOnTrial:Boolean!hasBeenOnTrialV2:Boolean!"""Globally unique identifier, which isn't guaranteed stable (can be changed by administrators).""" id:ID!"""Internal immutable identifier for the account. Only visible to Apollo admins (because it really
+shouldn't be used in normal client apps).""" internalID:ID!invitations(includeAccepted:Boolean!=false):[AccountInvitation!]invoices:[Invoice!]invoicesV2:[InvoiceV2!]!isOnExpiredTrial:Boolean!isOnTrial:Boolean!legacyIsOnTrial:Boolean!memberships:[AccountMembership!]"""Name of the organization, which can change over time and isn't unique.""" name:String!provisionedAt:Timestamp recurlyEmail:String """Returns a different registry related stats pertaining to this account.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow requests(from:Timestamp!to:Timestamp!):Long requestsInCurrentBillingPeriod:Long roles:AccountRoles """How many seats would be included in your next bill, as best estimated today""" seatCountForNextBill:Int seats:Seats secondaryIDs:[ID!]!"""Graphs belonging to this organization.""" services(includeDeleted:Boolean):[Service!]!"""If non-null, this organization tracks its members through an upstream, eg PingOne;
+invitations are not possible on SSO-synchronized account.""" sso:OrganizationSSO state:AccountState """A list of reusable invitations for the organization.""" staticInvitations:[OrganizationInviteLink!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow!@deprecated(reason:"use Account.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow subscriptions:[BillingSubscription!]subscriptionsV2:[BillingSubscriptionV2!]!"""Gets a ticket for this org, by id""" ticket(id:ID!):ZendeskTicket """List of Zendesk tickets submitted for this org""" tickets:[ZendeskTicket!]}"""Columns of AccountBillingUsageStats.""" enum AccountBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilter{and:[AccountBillingUsageStatsFilter!]in:AccountBillingUsageStatsFilterIn not:AccountBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[AccountBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountBillingUsageStatsMetrics{operationCount:Long!}input AccountBillingUsageStatsOrderBySpec{column:AccountBillingUsageStatsColumn!direction:Ordering!}type AccountBillingUsageStatsRecord{"""Dimensions of AccountBillingUsageStats that can be grouped by.""" groupBy:AccountBillingUsageStatsDimensions!"""Metrics of AccountBillingUsageStats that can be aggregated over.""" metrics:AccountBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountChecksStatsMetrics{totalFailedChecks:Long!totalSuccessfulChecks:Long!}type AccountChecksStatsRecord{id:ID!metrics:AccountChecksStatsMetrics!timestamp:Timestamp!}"""Columns of AccountEdgeServerInfos.""" enum AccountEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type AccountEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilter{and:[AccountEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:AccountEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:AccountEdgeServerInfosFilter or:[AccountEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input AccountEdgeServerInfosOrderBySpec{column:AccountEdgeServerInfosColumn!direction:Ordering!}type AccountEdgeServerInfosRecord{"""Dimensions of AccountEdgeServerInfos that can be grouped by.""" groupBy:AccountEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountErrorStats.""" enum AccountErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountErrorStatsFilter{and:[AccountErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountErrorStatsFilterIn not:AccountErrorStatsFilter or:[AccountErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input AccountErrorStatsOrderBySpec{column:AccountErrorStatsColumn!direction:Ordering!}type AccountErrorStatsRecord{"""Dimensions of AccountErrorStats that can be grouped by.""" groupBy:AccountErrorStatsDimensions!"""Metrics of AccountErrorStats that can be aggregated over.""" metrics:AccountErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountExperimentalFeatures{auditLogs:Boolean!championDashboard:Boolean!derivedLaunches:Boolean!federation2Preview:Boolean!preRequestPreview:Boolean!publicVariants:Boolean!variantHomepage:Boolean!webhooksPreview:Boolean!}"""Columns of AccountFieldLatencies.""" enum AccountFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilter{and:[AccountFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldLatenciesFilterIn not:AccountFieldLatenciesFilter or:[AccountFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input AccountFieldLatenciesOrderBySpec{column:AccountFieldLatenciesColumn!direction:Ordering!}type AccountFieldLatenciesRecord{"""Dimensions of AccountFieldLatencies that can be grouped by.""" groupBy:AccountFieldLatenciesDimensions!"""Metrics of AccountFieldLatencies that can be aggregated over.""" metrics:AccountFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldUsage.""" enum AccountFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldUsageFilter{and:[AccountFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldUsageFilterIn not:AccountFieldUsageFilter or:[AccountFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldUsage. Fields match if the corresponding dimension's value is in the given list. 
All fields are implicitly ANDed together.""" input AccountFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input AccountFieldUsageOrderBySpec{column:AccountFieldUsageColumn!direction:Ordering!}type AccountFieldUsageRecord{"""Dimensions of AccountFieldUsage that can be grouped by.""" groupBy:AccountFieldUsageDimensions!"""Metrics of AccountFieldUsage that can be aggregated over.""" metrics:AccountFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountInvitation{"""An accepted invitation cannot be used anymore""" acceptedAt:Timestamp """Who accepted the invitation""" acceptedBy:User """Time the invitation was created""" createdAt:Timestamp!"""Who created the invitation""" createdBy:User email:String!id:ID!"""Last time we sent an email for the invitation""" lastSentAt:Timestamp """Access role for the invitee""" role:UserPermission!}type AccountMembership{account:Account!createdAt:Timestamp!"""If this membership is a free seat (based on role)""" free:Boolean permission:UserPermission!user:User!}type AccountMutation{auditExport(id:String!):AuditLogExportMutation """Cancel a pending change from an annual team subscription to a monthly team subscription when the current period expires.""" cancelConvertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Cancel account subscriptions, subscriptions will remain active until the end of the paid period""" cancelSubscriptions:Account """Changes an annual team subscription to a monthly team subscription when the current period expires.""" convertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Changes a monthly team subscription to an annual team subscription.""" convertMonthlyTeamSubscriptionToAnnual:Account createStaticInvitation(role:UserPermission!):OrganizationInviteLink """Delete the account's avatar. 
Requires Account.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Acknowledge that a trial has expired and return to community""" dismissExpiredTrial:Account """Apollo admins only: extend an ongoing trial""" extendTrial(to:Timestamp!):Account """Hard delete an account and all associated services""" hardDelete:Void """Send an invitation to join the account by E-mail""" invite(email:String!role:UserPermission):AccountInvitation """Reactivate a canceled current subscription""" reactivateCurrentSubscription:Account """Refresh billing information from third-party billing service""" refreshBilling:Void """Delete an invitation""" removeInvitation(id:ID):Void """Remove a member of the account""" removeMember(id:ID!):Account requestAuditExport(actors:[ActorInput!]from:Timestamp!graphIds:[String!]to:Timestamp!):Account """Send a new E-mail for an existing invitation""" resendInvitation(id:ID):AccountInvitation revokeStaticInvitation(token:String!):OrganizationInviteLink """Apollo admins only: set the billing plan to an arbitrary plan""" setPlan(id:ID!):Void """Start a new team subscription with the given billing period""" startTeamSubscription(billingPeriod:BillingPeriod!):Account """Start a team trial""" startTrial:Account """This is called by the form shown to users after they cancel their team subscription.""" submitTeamCancellationFeedback(feedback:String!):Void """Apollo admins only: terminate any ongoing subscriptions in the account, without refunds""" terminateSubscriptions:Account """Update the billing address for a Recurly token""" updateBillingAddress(billingAddress:BillingAddressInput!):Account """Update the billing information from a Recurly token""" updateBillingInfo(token:String!):Void updateCompanyUrl(companyUrl:String):Account """Set the E-mail address of the account, used notably for billing""" updateEmail(email:String!):Void """Update the account ID""" updateID(id:ID!):Account """Update the company name""" updateName(name:String!):Void """Apollo admins only: enable or disable an account for PingOne SSO login""" updatePingOneSSOIDPID(idpid:String):Account """Updates the role assigned to new SSO users.""" updateSSODefaultRole(role:UserPermission!):OrganizationSSO """A (currently) internal to Apollo mutation to update a user's role within an organization""" updateUserPermission(permission:UserPermission!userID:ID!):User}"""Columns of AccountOperationCheckStats.""" enum AccountOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type AccountOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilter{and:[AccountOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountOperationCheckStatsFilterIn not:AccountOperationCheckStatsFilter or:[AccountOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input AccountOperationCheckStatsOrderBySpec{column:AccountOperationCheckStatsColumn!direction:Ordering!}type AccountOperationCheckStatsRecord{"""Dimensions of AccountOperationCheckStats that can be grouped by.""" groupBy:AccountOperationCheckStatsDimensions!"""Metrics of AccountOperationCheckStats that can be aggregated over.""" metrics:AccountOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountPublishesStatsMetrics{totalPublishes:Long!}type AccountPublishesStatsRecord{id:ID!metrics:AccountPublishesStatsMetrics!timestamp:Timestamp!}"""Columns of AccountQueryStats.""" enum AccountQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type AccountQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountQueryStatsFilter{and:[AccountQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. 
To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:AccountQueryStatsFilterIn not:AccountQueryStatsFilter or:[AccountQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input AccountQueryStatsOrderBySpec{column:AccountQueryStatsColumn!direction:Ordering!}type AccountQueryStatsRecord{"""Dimensions of AccountQueryStats that can be grouped by.""" groupBy:AccountQueryStatsDimensions!"""Metrics of AccountQueryStats that can be aggregated over.""" metrics:AccountQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountRoles{canAudit:Boolean!canCreateDevGraph:Boolean!canCreateService:Boolean!canDelete:Boolean!canDownloadInvoice:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canManageMembers:Boolean!canQuery:Boolean!canQueryAudit:Boolean!canQueryBillingInfo:Boolean!canQueryInvoices:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canQueryMembers:Boolean!canQueryStats:Boolean!canReadTickets:Boolean!canRemoveMembers:Boolean!canSetConstrainedPlan:Boolean!canUpdateBillingInfo:Boolean!canUpdateMetadata:Boolean!}enum AccountState{ACTIVE CLOSED UNKNOWN UNPROVISIONED}"""A time window with a specified granularity over a given account.""" type AccountStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:AccountBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountBillingUsageStatsOrderBySpec!]):[AccountBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:AccountEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountEdgeServerInfosOrderBySpec!]):[AccountEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:AccountErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountErrorStatsOrderBySpec!]):[AccountErrorStatsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:AccountFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldLatenciesOrderBySpec!]):[AccountFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:AccountFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldUsageOrderBySpec!]):[AccountFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:AccountOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountOperationCheckStatsOrderBySpec!]):[AccountOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:AccountQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountQueryStatsOrderBySpec!]):[AccountQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:AccountTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTracePathErrorsRefsOrderBySpec!]):[AccountTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:AccountTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTraceRefsOrderBySpec!]):[AccountTraceRefsRecord!]!}"""Columns of AccountTracePathErrorsRefs.""" enum AccountTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type AccountTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in AccountTracePathErrorsRefs. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilter{and:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:AccountTracePathErrorsRefsFilterIn not:AccountTracePathErrorsRefsFilter or:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input AccountTracePathErrorsRefsOrderBySpec{column:AccountTracePathErrorsRefsColumn!direction:Ordering!}type AccountTracePathErrorsRefsRecord{"""Dimensions of AccountTracePathErrorsRefs that can be grouped by.""" groupBy:AccountTracePathErrorsRefsDimensions!"""Metrics of AccountTracePathErrorsRefs that can be aggregated over.""" metrics:AccountTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountTraceRefs.""" enum AccountTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type AccountTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in AccountTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTraceRefsFilter{and:[AccountTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:AccountTraceRefsFilterIn not:AccountTraceRefsFilter or:[AccountTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. 
To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input AccountTraceRefsOrderBySpec{column:AccountTraceRefsColumn!direction:Ordering!}type AccountTraceRefsRecord{"""Dimensions of AccountTraceRefs that can be grouped by.""" groupBy:AccountTraceRefsDimensions!"""Metrics of AccountTraceRefs that can be aggregated over.""" metrics:AccountTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""An actor (view of Identity) that performed an action within Studio.""" type Actor{actorId:ID!type:ActorType!}input ActorInput{actorId:ID!type:ActorType!}enum ActorType{ANONYMOUS_USER BACKFILL CRON GRAPH INTERNAL_IDENTITY SYNCHRONIZATION SYSTEM USER}union AddOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union AddOperationCollectionToVariantResult=GraphVariant|InvalidTarget|PermissionError|ValidationError type AffectedClient{"""ID, often the name, of the client set by the user and reported alongside metrics""" clientReferenceId:ID@deprecated(reason:"Unsupported.")"""version of the client set by the user and reported alongside metrics""" clientVersion:String@deprecated(reason:"Unsupported.")}type AffectedQuery{"""If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved""" alreadyApproved:Boolean """If the operation would be ignored if the check ran again""" alreadyIgnored:Boolean """List of changes affecting this query. 
Returns null if queried from SchemaDiff.changes.affectedQueries.changes""" changes:[ChangeOnOperation!]"""Name to display to the user for the operation""" displayName:String id:ID!"""Determines if this query validates against the proposed schema""" isValid:Boolean """Whether this operation was ignored and its severity was downgraded for that reason""" markedAsIgnored:Boolean """Whether the changes were marked as safe and its severity was downgraded for that reason""" markedAsSafe:Boolean """Name provided for the operation, which can be empty string if it is an anonymous operation""" name:String """First 128 characters of query signature for display""" signature:String}interface ApiKey{id:ID!keyName:String token:String!}type ApiKeyProvision{apiKey:ApiKey!created:Boolean!}type AuditLogExport{actors:[Identity!]bigqueryTriggeredAt:Timestamp completedAt:Timestamp createdAt:Timestamp!exportedFiles:[String!]from:Timestamp!graphs:[Service!]id:ID!requester:User status:AuditStatus!to:Timestamp!}type AuditLogExportMutation{cancel:Account delete:Account}enum AuditStatus{CANCELLED COMPLETED EXPIRED FAILED IN_PROGRESS QUEUED}type AvatarDeleteError{clientMessage:String!code:AvatarDeleteErrorCode!serverMessage:String!}enum AvatarDeleteErrorCode{SSO_USERS_CANNOT_DELETE_SELF_AVATAR}type AvatarUploadError{clientMessage:String!code:AvatarUploadErrorCode!serverMessage:String!}enum AvatarUploadErrorCode{SSO_USERS_CANNOT_UPLOAD_SELF_AVATAR}union AvatarUploadResult=AvatarUploadError|MediaUploadInfo type BillingAddress{address1:String address2:String city:String country:String state:String zip:String}"""Billing address input""" input BillingAddressInput{address1:String!address2:String city:String!country:String!state:String!zip:String!}type BillingInfo{address:BillingAddress!cardType:String firstName:String lastFour:Int lastName:String month:Int vatNumber:String year:Int}enum BillingModel{REQUEST_BASED SEAT_BASED}type BillingMonth{end:Timestamp!requests:Long!start:Timestamp!}enum BillingPeriod{MONTHLY QUARTERLY SEMI_ANNUALLY YEARLY}type BillingPlan{addons:[BillingPlanAddon!]!billingModel:BillingModel!billingPeriod:BillingPeriod capabilities:BillingPlanCapabilities!description:String id:ID!isTrial:Boolean!kind:BillingPlanKind!name:String!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!tier:BillingPlanTier!}type BillingPlanAddon{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanAddonV2{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanCapabilities{clients:Boolean!contracts:Boolean!datadog:Boolean!errors:Boolean!federation:Boolean!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!notifications:Boolean!operationRegistry:Boolean!ranges:[String!]!schemaValidation:Boolean!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}enum BillingPlanKind{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL}enum BillingPlanKindV2{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL UNKNOWN}enum BillingPlanTier{COMMUNITY ENTERPRISE TEAM}enum BillingPlanTierV2{COMMUNITY ENTERPRISE TEAM UNKNOWN}type BillingPlanV2{addons:[BillingPlanAddonV2!]!billingModel:BillingModel!billingPeriod:BillingPeriod clients:Boolean!contracts:Boolean!datadog:Boolean!description:String 
errors:Boolean!federation:Boolean!id:ID!isTrial:Boolean!kind:BillingPlanKindV2!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!name:String!notifications:Boolean!operationRegistry:Boolean!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!ranges:[String!]!schemaValidation:Boolean!tier:BillingPlanTierV2!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}type BillingSubscription{activatedAt:Timestamp!addons:[BillingSubscriptionAddon!]!autoRenew:Boolean!"""The price of the subscription when ignoring add-ons (such as seats), ie quantity * pricePerUnitInUsdCents""" basePriceInUsdCents:Long!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlan!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" pricePerUnitInUsdCents:Int!quantity:Int!"""Total price of the subscription when it next renews, including add-ons (such as seats)""" renewalTotalPriceInUsdCents:Long!state:SubscriptionState!"""Total price of the subscription, including add-ons (such as seats)""" totalPriceInUsdCents:Long!"""When this subscription's trial period expires (if it is a trial). Not the same as the
+subscription's Recurly expiration).""" trialExpiresAt:Timestamp uuid:ID!}type BillingSubscriptionAddon{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionAddonV2{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionV2{"""The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" activatedAt:Timestamp!addons:[BillingSubscriptionAddonV2!]!autoRenew:Boolean!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlanV2!"""The price of every seat""" pricePerSeatInUsdCents:Int quantity:Int!state:SubscriptionStateV2!"""When this subscription's trial period expires (if it is a trial). Not the same as the
+subscription's Recurly expiration).""" trialExpiresAt:Timestamp uuid:ID!}"""Columns of BillingUsageStats.""" enum BillingUsageStatsColumn{ACCOUNT_ID OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type BillingUsageStatsDimensions{accountId:ID operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in BillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input BillingUsageStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[BillingUsageStatsFilter!]in:BillingUsageStatsFilterIn not:BillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[BillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in BillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input BillingUsageStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type BillingUsageStatsMetrics{operationCount:Long!}input BillingUsageStatsOrderBySpec{column:BillingUsageStatsColumn!direction:Ordering!}type BillingUsageStatsRecord{"""Dimensions of BillingUsageStats that can be grouped by.""" groupBy:BillingUsageStatsDimensions!"""Metrics of BillingUsageStats that can be aggregated over.""" metrics:BillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A blob (base64'ed in JSON & GraphQL)""" scalar Blob type Build{input:BuildInput!result:BuildResult}type BuildError{code:String locations:[SourceLocation!]!message:String!}type BuildFailure{errorMessages:[BuildError!]!}union BuildInput=CompositionBuildInput|FilterBuildInput union BuildResult=BuildFailure|BuildSuccess type BuildSuccess{coreSchema:CoreSchema!}enum CacheScope{PRIVATE PUBLIC UNKNOWN UNRECOGNIZED}"""A specific change to a definition in your schema.""" type Change{affectedQueries:[AffectedQuery!]"""Target arg of change made.""" argNode:NamedIntrospectionArg """Indication of the category of the change (e.g. addition, removal, edit).""" category:ChangeCategory!"""Node related to the top level node that was changed, such as a field in an object,
+a value in an enum or the object of an interface.""" childNode:NamedIntrospectionValue """Indication of the kind of target and action of the change, e.g. 'TYPE_REMOVED'.""" code:String!"""Human-readable description of the change.""" description:String!"""Top level node affected by the change.""" parentNode:NamedIntrospectionType """Severity of the change, either failure or warning.""" severity:ChangeSeverity!"""Indication of the success of the overall change, either failure, warning, or notice.""" type:ChangeType!@deprecated(reason:"use severity instead")}"""Defines a set of categories that a schema change
+can be grouped by.""" enum ChangeCategory{ADDITION DEPRECATION EDIT REMOVAL}"""These schema change codes represent all of the possible changes that can
+occur during the schema diff algorithm.""" enum ChangeCode{"""Type of the argument was changed.""" ARG_CHANGED_TYPE """Argument was changed from nullable to non-nullable.""" ARG_CHANGED_TYPE_OPTIONAL_TO_REQUIRED """Default value added or changed for the argument.""" ARG_DEFAULT_VALUE_CHANGE """Description was added, removed, or updated for argument.""" ARG_DESCRIPTION_CHANGE """Argument to a field was removed.""" ARG_REMOVED """Argument to the directive was removed.""" DIRECTIVE_ARG_REMOVED """Location of the directive was removed.""" DIRECTIVE_LOCATION_REMOVED """Directive was removed.""" DIRECTIVE_REMOVED """Repeatable flag was removed for directive.""" DIRECTIVE_REPEATABLE_REMOVED """Enum was deprecated.""" ENUM_DEPRECATED """Reason for enum deprecation changed.""" ENUM_DEPRECATED_REASON_CHANGE """Enum deprecation was removed.""" ENUM_DEPRECATION_REMOVED """Description was added, removed, or updated for enum value.""" ENUM_VALUE_DESCRIPTION_CHANGE """Field was added to the type.""" FIELD_ADDED """Return type for the field was changed.""" FIELD_CHANGED_TYPE """Field was deprecated.""" FIELD_DEPRECATED """Reason for field deprecation changed.""" FIELD_DEPRECATED_REASON_CHANGE """Field deprecation removed.""" FIELD_DEPRECATION_REMOVED """Description was added, removed, or updated for field.""" FIELD_DESCRIPTION_CHANGE """Type of the field in the input object was changed.""" FIELD_ON_INPUT_OBJECT_CHANGED_TYPE """Field was removed from the type.""" FIELD_REMOVED """Field was removed from the input object.""" FIELD_REMOVED_FROM_INPUT_OBJECT """Non-nullable field was added to the input object. (Deprecated.)""" NON_NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT """Nullable field was added to the input type. (Deprecated.)""" NULLABLE_FIELD_ADDED_TO_INPUT_OBJECT """Nullable argument was added to the field.""" OPTIONAL_ARG_ADDED """Optional field was added to the input type.""" OPTIONAL_FIELD_ADDED_TO_INPUT_OBJECT """Non-nullable argument was added to the field.""" REQUIRED_ARG_ADDED """Non-nullable argument added to directive.""" REQUIRED_DIRECTIVE_ARG_ADDED """Required field was added to the input object.""" REQUIRED_FIELD_ADDED_TO_INPUT_OBJECT """Type was added to the schema.""" TYPE_ADDED """Type now implements the interface.""" TYPE_ADDED_TO_INTERFACE """A new value was added to the enum.""" TYPE_ADDED_TO_UNION """Type was changed from one kind to another.
+Ex: scalar to object or enum to union.""" TYPE_CHANGED_KIND """Description was added, removed, or updated for type.""" TYPE_DESCRIPTION_CHANGE """Type (object or scalar) was removed from the schema.""" TYPE_REMOVED """Type no longer implements the interface.""" TYPE_REMOVED_FROM_INTERFACE """Type is no longer included in the union.""" TYPE_REMOVED_FROM_UNION """A new value was added to the enum.""" VALUE_ADDED_TO_ENUM """Value was removed from the enum.""" VALUE_REMOVED_FROM_ENUM}"""Represents the tuple of static information
+about a particular kind of schema change.""" type ChangeDefinition{category:ChangeCategory!code:ChangeCode!defaultSeverity:ChangeSeverity!}"""Info about a change in the context of an operation it affects""" type ChangeOnOperation{"""Human-readable explanation of the impact of this change on the operation""" impact:String """The semantic info about this change, i.e. info about the change that doesn't depend on the operation""" semanticChange:SemanticChange!}enum ChangeSeverity{FAILURE NOTICE}"""Summary of the changes for a schema diff, computed by placing the changes into categories and then
+counting the size of each category. This categorization can be done in different ways, and
+accordingly there are multiple fields here for each type of categorization.
+
+Note that if an object or interface field is added/removed, there won't be any addition/removal
+changes generated for its arguments or @deprecated usages. If an enum type is added/removed, there
+will be addition/removal changes generated for its values, but not for those values' @deprecated
+usages. Description changes won't be generated for a schema element if that element (or an
+ancestor) was added/removed.""" type ChangeSummary{"""Counts for changes to fields of objects, input objects, and interfaces.""" field:FieldChangeSummaryCounts!"""Counts for all changes.""" total:TotalChangeSummaryCounts!"""Counts for changes to non-field aspects of objects, input objects, and interfaces,
+and all aspects of enums, unions, and scalars.""" type:TypeChangeSummaryCounts!}enum ChangeType{FAILURE NOTICE}type ChangelogLaunchResult{createdAt:Timestamp!schemaTagID:ID!}"""Destination for notifications""" interface Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!}interface ChannelSubscription{channels:[Channel!]!enabled:Boolean!id:ID!variant:String}type CheckConfiguration{"""Time when check configuration was created""" createdAt:Timestamp!"""Clients to ignore during validation""" excludedClients:[ClientFilter!]!"""Operation names to ignore during validation""" excludedOperationNames:[OperationNameFilter]"""Operations to ignore during validation""" excludedOperations:[ExcludedOperation!]!"""Graph that this check configuration belongs to""" graphID:ID!"""ID of the check configuration""" id:ID!"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean!"""Variant overrides for validation""" includedVariants:[String!]!"""Minimum number of requests within the window for an operation to be considered.""" operationCountThreshold:Int!"""Number of requests within the window for an operation to be considered, relative to
+total request count. Expected values are between 0 and 0.05 (minimum 5% of
+total request volume)""" operationCountThresholdPercentage:Float!"""Only check operations from the last <timeRangeSeconds> seconds.
+The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long!"""Time when check configuration was last updated""" updatedAt:Timestamp!"""Identity of the last user to update the check configuration""" updatedBy:Identity}"""Filter options available when listing checks.""" input CheckFilterInput{authors:[String!]branches:[String!]status:CheckFilterInputStatusOption subgraphs:[String!]}"""Options for filtering CheckWorkflows by status""" enum CheckFilterInputStatusOption{FAILED PASSED PENDING}"""The result of performing a subgraph check, including all steps.""" type CheckPartialSchemaResult{"""Overall result of the check. This will be null if composition validation was unsuccessful.""" checkSchemaResult:CheckSchemaResult """Result of composition run as part of the overall subgraph check.""" compositionValidationResult:CompositionValidationResult!"""If any modifications were detected in the composed core schema""" coreSchemaModified:Boolean!"""Check workflow associated with the overall subgraph check.""" workflow:CheckWorkflow}type CheckSchemaResult{"""Schema diff and affected operations generated by the schema check""" diffToPrevious:SchemaDiff!"""ID of the operations check that was created""" operationsCheckID:ID!"""Generated url to view schema diff in Engine""" targetUrl:String """Workflow associated with this check result""" workflow:CheckWorkflow}type CheckWorkflow{"""The variant provided as a base to check against.  Only the differences from the
+base schema will be tested in operations checks.""" baseVariant:GraphVariant completedAt:Timestamp createdAt:Timestamp!"""Contextual parameters supplied by the runtime environment where the check was run.""" gitContext:GitContext id:ID!"""The name of the implementing service that was responsible for triggering the validation.""" implementingServiceName:String """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String """Only true if the check was triggered from Sandbox Checks page.""" isSandboxCheck:Boolean!"""If this check was created by rerunning, the original check that was rerun.""" rerunOf:CheckWorkflow """Checks created by re-running this check, most recent first.""" reruns(limit:Int!=20):[CheckWorkflow!]startedAt:Timestamp """Overall status of the workflow, based on the underlying task statuses.""" status:CheckWorkflowStatus!"""The set of check tasks associated with this workflow, e.g. OperationsCheck, GraphComposition, etc.""" tasks:[CheckWorkflowTask!]!"""Identity of the user who ran this check""" triggeredBy:Identity """Configuration of validation at the time the check was run.""" validationConfig:SchemaDiffValidationConfig}type CheckWorkflowMutation{"""Re-run a check workflow using the current configuration. A new workflow is created and returned.""" rerun:CheckWorkflowRerunResult}type CheckWorkflowRerunResult{"""Check workflow created by re-running.""" result:CheckWorkflow """Check workflow that was rerun.""" source:CheckWorkflow}enum CheckWorkflowStatus{FAILED PASSED PENDING}interface CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!status:CheckWorkflowTaskStatus!"""The workflow that this task belongs to.""" workflow:CheckWorkflow!}enum CheckWorkflowTaskStatus{BLOCKED FAILED PASSED PENDING}"""Client filter configuration for a graph.""" type ClientFilter{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Options to filter by client reference ID, client name, and client version.
+If passing client version, make sure to either provide a client reference ID or client name.""" input ClientFilterInput{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Filter options to exclude by client reference ID, client name, and client version.""" input ClientInfoFilter{name:String """Ignored""" referenceID:ID version:String}"""Filter options to exclude clients. Used as an output type for SchemaDiffValidationConfig.""" type ClientInfoFilterOutput{name:String version:String}enum ComparisonOperator{EQUALS GREATER_THAN GREATER_THAN_OR_EQUAL_TO LESS_THAN LESS_THAN_OR_EQUAL_TO NOT_EQUALS UNRECOGNIZED}"""The result of composition run in the cloud, upon an attempted subgraph deletion.""" type CompositionAndRemoveResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Whether the removed implementing service existed.""" didExist:Boolean!"""  List of errors during composition. Errors mean that Apollo was unable to compose the
+  graph variant's subgraphs into a GraphQL schema. If present, gateways / routers
+are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!}"""The result of composition run in the cloud, upon attempted publish of a subgraph.""" type CompositionAndUpsertResult{"""The produced composition config, or null if there are any errors.""" compositionConfig:CompositionConfig """List of errors during composition. Errors mean that Apollo was unable to compose the
+graph variant's subgraphs into a supergraph schema. If present, gateways / routers
+are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""Copy text for the launch result of a publish.""" launchCliCopy:String """Link to corresponding launches page on Studio if available.""" launchUrl:String """List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!"""Whether a subgraph was created as part of this mutation.""" wasCreated:Boolean!"""Whether an implementingService was updated as part of this mutation""" wasUpdated:Boolean!}type CompositionBuildInput{subgraphs:[Subgraph!]!version:String}type CompositionCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the composition.""" result:CompositionResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}"""Composition configuration exposed to the gateway.""" type CompositionConfig{"""List of GCS links for implementing services that comprise a composed graph. Is empty if tag/inaccessible is enabled.""" implementingServiceLocations:[ImplementingServiceLocation!]!@deprecated(reason:"Soon we will stop writing to GCS locations")"""Hash of the API schema.""" schemaHash:String!}"""The result of composition run in the cloud.""" type CompositionPublishResult implements CompositionResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the
+graph variant's subgraphs into a supergraph schema. If present, gateways / routers
+are not updated.""" errors:[SchemaCompositionError!]!"""ID for a particular composition.""" graphCompositionID:ID!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument """Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!webhookNotificationBody:String}"""Result of a composition, often as the result of a subgraph check or subgraph publish.
+See implementations for more details.""" interface CompositionResult{"""Supergraph SDL generated by composition (this is not the cSDL, a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the
+graph variant's subgraphs into a supergraph schema. If present, gateways / routers
+are not updated.""" errors:[SchemaCompositionError!]!"""Globally unique identifier for the composition.""" graphCompositionID:ID!"""List of subgraphs included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument}type CompositionStatusSubscription implements ChannelSubscription{channels:[Channel!]!createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!variant:String}"""The composition config exposed to the gateway""" type CompositionValidationDetails{"""List of implementing service partial schemas that comprised the graph composed during validation""" implementingServices:[FederatedImplementingServicePartialSchema!]!"""Hash of the composed schema""" schemaHash:String}"""Metadata about the result of compositions validation run in the cloud, during a subgraph check.""" type CompositionValidationResult implements CompositionResult{"""Describes whether composition succeeded.""" compositionSuccess:Boolean!"""Akin to a composition config, represents the subgraph schemas and corresponding subgraphs that were used
+in running composition. Will be null if any errors are encountered. Also may contain a schema hash if
+one could be computed, which can be used for schema validation.""" compositionValidationDetails:CompositionValidationDetails """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the
+graph variant's subgraphs into a supergraph schema. If present, gateways / routers
+are not updated.""" errors:[SchemaCompositionError!]!"""ID that points to the results of this composition.""" graphCompositionID:ID!"""The implementing service that was responsible for triggering the validation""" proposedImplementingService:FederatedImplementingServicePartialSchema!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph schema document generated by composition.""" supergraphSdl:GraphQLDocument """If created as part of a check workflow, the associated workflow task.""" workflowTask:CompositionCheckTask}type ContractPreview{result:ContractPreviewResult!upstreamLaunch:Launch!}type ContractPreviewErrors{errors:[String!]!failedAt:ContractVariantFailedStep!}union ContractPreviewResult=ContractPreviewErrors|ContractPreviewSuccess type ContractPreviewSuccess{apiDocument:String!coreDocument:String!fieldCount:Int!typeCount:Int!}enum ContractVariantFailedStep{ADD_DIRECTIVE_DEFINITIONS_IF_NOT_PRESENT DIRECTIVE_DEFINITION_LOCATION_AUGMENTING EMPTY_OBJECT_AND_INTERFACE_MASKING EMPTY_UNION_MASKING INPUT_VALIDATION PARSING PARSING_TAG_DIRECTIVES PARTIAL_INTERFACE_MASKING SCHEMA_RETRIEVAL TAG_INHERITING TAG_MATCHING TO_API_SCHEMA TO_FILTER_SCHEMA UNKNOWN VERSION_CHECK}type ContractVariantPreviewErrors{errorMessages:[String!]!failedStep:ContractVariantFailedStep!}union ContractVariantPreviewResult=ContractVariantPreviewErrors|ContractVariantPreviewSuccess type ContractVariantPreviewSuccess{baseApiSchema:String!baseCoreSchema:String!contractApiSchema:String!contractCoreSchema:String!}type ContractVariantUpsertErrors{errorMessages:[String!]!}union ContractVariantUpsertResult=ContractVariantUpsertErrors|ContractVariantUpsertSuccess type ContractVariantUpsertSuccess{contractVariant:GraphVariant!}type CoreSchema{apiDocument:GraphQLDocument!coreDocument:GraphQLDocument!coreHash:String!fieldCount:Int!typeCount:Int!}union CreateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type CronExecution{completedAt:Timestamp failure:String id:ID!job:CronJob!resolvedAt:Timestamp resolvedBy:Actor schedule:String!startedAt:Timestamp!}type CronJob{group:String!name:String!recentExecutions(n:Int):[CronExecution!]!}enum DatadogApiRegion{EU EU1 US US1 US1FED US3 US5}type DatadogMetricsConfig{apiKey:String!apiRegion:DatadogApiRegion!enabled:Boolean!legacyMetricNames:Boolean!}union DeleteOperationCollectionResult=DeleteOperationCollectionSuccess|PermissionError type DeleteOperationCollectionSuccess{sandboxOwner:User variants:[GraphVariant!]!}"""The result of attempting to delete a graph variant.""" type DeleteSchemaTagResult{"""Whether a variant was deleted or not.""" deleted:Boolean!}enum DeletionTargetType{ACCOUNT USER}"""Support for a single directive on a graph variant""" type DirectiveSupportStatus{"""whether the directive is supported on the current graph variant""" enabled:Boolean!"""name of the directive""" name:String!}union DuplicateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type DurationHistogram{averageDurationMs:Float buckets:[DurationHistogramBucket!]!durationMs("""Percentile (between 0 and 1)""" percentile:Float!):Float """Counts per durationBucket, where sequences of zeroes are replaced with the negative of their size""" sparseBuckets:[Long!]!totalCount:Long!totalDurationMs:Float!}type DurationHistogramBucket{count:Long!index:Int!rangeBeginMs:Float!rangeEndMs:Float!}input EdgeServerInfo{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" 
bootId:String!"""A unique identifier for the executable GraphQL served by the edge server. length must be <= 64 characters.""" executableSchemaId:String!"""The graph variant, defaults to 'current'""" graphVariant:String!="current" """The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""Columns of EdgeServerInfos.""" enum EdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type EdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in EdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input EdgeServerInfosFilter{and:[EdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:EdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:EdgeServerInfosFilter or:[EdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in EdgeServerInfos. 
Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input EdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input EdgeServerInfosOrderBySpec{column:EdgeServerInfosColumn!direction:Ordering!}type EdgeServerInfosRecord{"""Dimensions of EdgeServerInfos that can be grouped by.""" groupBy:EdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}enum EmailCategory{EDUCATIONAL}type EmailPreferences{email:String!subscriptions:[EmailCategory!]!unsubscribedFromAll:Boolean!}interface Error{message:String!}"""Columns of ErrorStats.""" enum ErrorStatsColumn{ACCOUNT_ID CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type ErrorStatsDimensions{accountId:ID clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in ErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ErrorStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[ErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ErrorStatsFilterIn not:ErrorStatsFilter or:[ErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. 
To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in ErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ErrorStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type ErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ErrorStatsOrderBySpec{column:ErrorStatsColumn!direction:Ordering!}type ErrorStatsRecord{"""Dimensions of ErrorStats that can be grouped by.""" groupBy:ErrorStatsDimensions!"""Metrics of ErrorStats that can be aggregated over.""" metrics:ErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}""" Input parameters for run explorer operation event.""" enum EventEnum{CLICK_CHECK_LIST CLICK_GO_TO_GRAPH_SETTINGS RUN_EXPLORER_OPERATION}"""Excluded operation for a graph.""" type ExcludedOperation{"""Operation ID to exclude from schema check.""" ID:ID!}"""Option to filter by operation ID.""" input ExcludedOperationInput{"""Operation ID to exclude from schema check.""" ID:ID!}type FeatureIntros{devGraph:Boolean!federatedGraph:Boolean!freeConsumerSeats:Boolean!}"""Feature Intros Input Type""" input FeatureIntrosInput{devGraph:Boolean federatedGraph:Boolean freeConsumerSeats:Boolean}"""Subgraph. Federated graph variants that are managed by Apollo Studio are composed of subgraphs.
+See https://www.apollographql.com/docs/federation/managed-federation/overview/ for more information.""" type FederatedImplementingService{"""The subgraph schema actively published, used for composition for the graph variant this subgraph belongs to.""" activePartialSchema:PartialSchema!"""Timestamp of when this subgraph was created.""" createdAt:Timestamp!"""The ID of the graph this subgraph belongs to.""" graphID:String!"""Which variant of a graph this subgraph belongs to.""" graphVariant:String!"""Name of the subgraph.""" name:String!"""The particular version/edition of a subgraph, entered by users. Typically a Git SHA or docker image ID.""" revision:String!"""Timestamp for when this subgraph was updated.""" updatedAt:Timestamp!"""URL of the subgraph's GraphQL endpoint.""" url:String}"""A minimal representation of a federated implementing service, using only a name and partial schema SDL""" type FederatedImplementingServicePartialSchema{"""The name of the implementing service""" name:String!"""The partial schema of the implementing service""" sdl:String!}"""Container for a list of subgraphs composing a graph.""" type FederatedImplementingServices{"""The list of underlying subgraphs.""" services:[FederatedImplementingService!]!}"""Counts of changes at the field level, including objects, interfaces, and input fields.""" type FieldChangeSummaryCounts{"""Number of changes that are additions of fields to object, interface, and input types.""" additions:Int!"""Number of changes that are field edits. This includes fields changing type and any field
+deprecation and description changes, but also includes any argument changes and any input object
+field changes.""" edits:Int!"""Number of changes that are removals of fields from object, interface, and input types.""" removals:Int!}"""Columns of FieldLatencies.""" enum FieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldLatenciesFilter{and:[FieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldLatenciesFilterIn not:FieldLatenciesFilter or:[FieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input FieldLatenciesOrderBySpec{column:FieldLatenciesColumn!direction:Ordering!}type FieldLatenciesRecord{"""Dimensions of FieldLatencies that can be grouped by.""" groupBy:FieldLatenciesDimensions!"""Metrics of FieldLatencies that can be aggregated over.""" metrics:FieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldUsage.""" enum FieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldUsageFilter{and:[FieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. 
To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldUsageFilterIn not:FieldUsageFilter or:[FieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input FieldUsageOrderBySpec{column:FieldUsageColumn!direction:Ordering!}type FieldUsageRecord{"""Dimensions of FieldUsage that can be grouped by.""" groupBy:FieldUsageDimensions!"""Metrics of FieldUsage that can be aggregated over.""" metrics:FieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type FilterBuildInput{filterConfig:FilterConfig!schemaHash:String!}type FilterConfig{exclude:[String!]!include:[String!]!}input FilterConfigInput{exclude:[String!]!include:[String!]!}type GitContext{branch:String commit:ID commitUrl:String committer:String message:String remoteHost:GitRemoteHost remoteUrl:String}"""This is stored with a schema when it is uploaded""" input GitContextInput{branch:String commit:ID committer:String message:String remoteUrl:String}enum GitRemoteHost{BITBUCKET GITHUB GITLAB}type GlobalExperimentalFeatures{operationsCollections:Boolean!sandboxesFullRelease:Boolean!sandboxesPreview:Boolean!sandboxesSchemaChecksPage:Boolean!sandboxesSchemaDiffPage:Boolean!subgraphsInSandbox:Boolean!}type GraphApiKey implements ApiKey{createdAt:Timestamp!createdBy:Identity id:ID!keyName:String role:UserPermission!token:String!}"""A union of all combinations that can comprise the implementingServices for a Service""" union GraphImplementors=FederatedImplementingServices|NonFederatedImplementingService scalar GraphQLDocument """A variant of a graph, often corresponding to an environment where a graph runs (e.g. staging).
+See https://www.apollographql.com/docs/studio/org/graphs/ for more details.""" type GraphVariant{"""As new schema tags keep getting published, activeSchemaPublish refers to the latest.""" activeSchemaPublish:SchemaTag """The version of composition currently in use, if applicable""" compositionVersion:String """Filter configuration used to create the contract schema""" contractFilterConfig:FilterConfig """Preview a Contract schema built from this source variant.""" contractPreview(filters:FilterConfigInput!):ContractPreview!"""Explorer setting for default headers for a graph""" defaultHeaders:String derivedVariantCount:Int!"""Graph the variant belongs to.""" graph:Service!"""Graph ID of the variant. Prefer using graph { id } when feasible.""" graphId:String!"""If the variant has managed subgraphs.""" hasManagedSubgraphs:Boolean """Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Represents whether this variant is a Contract.""" isContract:Boolean!"""Is this variant one of the current user's favorite variants?""" isFavoriteOfCurrentUser:Boolean!"""If the variant has managed subgraphs.""" isFederated:Boolean@deprecated(reason:"Replaced by hasManagedSubgraphs")"""If the variant is protected""" isProtected:Boolean!isPublic:Boolean!"""Represents whether this variant should be listed in the public variants directory. This can only be true if the variant is also public.""" isPubliclyListed:Boolean!"""Represents whether Apollo has verified the authenticity of this public variant. This can only be true if the variant is also public.""" isVerified:Boolean!"""Latest approved launch for the variant, and what is served through Uplink.""" latestApprovedLaunch:Launch """Latest launch for the variant, whether successful or not.""" latestLaunch:Launch """Latest publication for the variant.""" latestPublication:SchemaTag launch(id:ID!):Launch launchHistory(limit:Int!=100):[Launch!]!links:[LinkInfo!]"""Name of the variant, like `variant`.""" name:String!operationCollections:[OperationCollection!]!"""Which permissions the current user has for interacting with this variant""" permissions:GraphVariantPermissions!"""Generate a federated operation plan for a given operation""" plan(document:GraphQLDocument!operationName:String):QueryPlan """Explorer setting for preflight script to run before the actual GraphQL operations is run.""" preflightScript:String readme:Readme """Registry stats for this particular graph variant""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """The total number of requests for this variant in the last 24 hours""" requestsInLastDay:Long """If the graphql endpoint is set up to accept cookies.""" sendCookies:Boolean sourceVariant:GraphVariant """List of subgraphs that comprise a variant, null if not federated.
+Set includeDeleted to see deleted subgraphs.""" subgraphs(includeDeleted:Boolean!=false):[FederatedImplementingService!]"""URL where subscription operations can be executed.""" subscriptionUrl:String """A list of supported directives""" supportedDirectives:[DirectiveSupportStatus!]"""URL where non-subscription operations can be executed.""" url:String """The last instant that usage information (e.g. operation stat, client stats) was reported for this variant""" usageLastReportedAt:Timestamp}"""Result of looking up a variant by ref""" union GraphVariantLookup=GraphVariant|InvalidRefFormat """Modifies a variant of a graph, also called a schema tag in parts of our product.""" type GraphVariantMutation{addLinkToVariant(title:String type:LinkInfoType!url:String!):GraphVariant!configureComposition(enableTagAndInaccessible:Boolean version:String):GraphVariant enableTagAndInaccessible(enabled:Boolean!):GraphVariant@deprecated(reason:"Use configureComposition instead")"""Graph ID of the variant""" graphId:String!"""Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Name of the variant, like `variant`.""" name:String!relaunch:RelaunchResult!removeLinkFromVariant(linkInfoId:ID!):GraphVariant!setIsFavoriteOfCurrentUser(favorite:Boolean!):GraphVariant!updateDefaultHeaders(defaultHeaders:String):GraphVariant updateIsProtected(isProtected:Boolean!):GraphVariant updatePreflightScript(preflightScript:String):GraphVariant updateSendCookies(sendCookies:Boolean!):GraphVariant updateSubscriptionURL(subscriptionUrl:String):GraphVariant updateURL(url:String):GraphVariant updateVariantIsPublic(isPublic:Boolean!):GraphVariant updateVariantIsPubliclyListed(isPubliclyListed:Boolean!):GraphVariant updateVariantIsVerified(isVerified:Boolean!):GraphVariant updateVariantReadme(readme:String!):GraphVariant}"""A map from permission String to boolean that the currently authenticated user is allowed for a particular graph variant.""" type GraphVariantPermissions{canCreateCollectionInVariant:Boolean!"""Whether the currently authenticated user is permitted to manage/update the build configuration (e.g. build pipeline version) for this variant.""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to update variant-level settings for the Schema Explorer.""" canManageExplorerSettings:Boolean!"""Whether the currently authenticated user is permitted to publish schemas to this variant.""" canPushSchemas:Boolean!"""Whether the currently authenticated user is permitted to view details regarding the build configuration (e.g. build pipeline version) for this variant.""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to download schemas associated to this variant.""" canQuerySchemas:Boolean!canShareCollectionInVariant:Boolean!canUpdateVariantLinkInfo:Boolean!"""Whether the currently authenticated user is permitted to update the README for this variant.""" canUpdateVariantReadme:Boolean!variantId:ID!}enum HTTPMethod{CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE UNKNOWN UNRECOGNIZED}input HistoricQueryParameters{"""A list of clients to filter out during validation.""" excludedClients:[ClientInfoFilter!]=null """A list of operation names to filter out during validation.""" excludedOperationNames:[OperationNameFilterInput!]=null from:Timestamp="-86400" """A list of operation IDs to filter out during validation.""" ignoredOperations:[ID!]=null """A list of variants to include in the validation. If no variants are provided
+then this defaults to the "current" variant along with the base variant. The
+base variant indicates the schema that generates diff and marks the metrics that
+are checked for broken queries. We union this base variant with the untagged values('',
+same as null inside of `in`, and 'current') in this metrics fetch. This strategy
+supports users who have not tagged their metrics or schema.""" includedVariants:[String!]=null """Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int=1 """Number of requests within the window for a query to be considered, relative to
+total request count. Expected values are between 0 and 0.05 (minimum 5% of total
+request volume)""" queryCountThresholdPercentage:Float=0 to:Timestamp="-0"}"""An identity (e.g. Anonymous, a specific User) within Apollo Studio. See implementations.""" interface Identity{"""A view of the identity as an Actor type.""" asActor:Actor!"""An identifier for a given identity, unique within the context of the identity type.""" id:ID!"""A human-readable name for the identity in question.""" name:String!}"""An actor's identity and info about the client they used to perform the action""" type IdentityAndClientInfo{"""Client name provided when the actor performed the action""" clientName:String """Client version provided when the actor performed the action""" clientVersion:String """Identity info about the actor""" identity:Identity}union IdentityMutation=ServiceMutation|UserMutation type IgnoreOperationsInChecksResult{graph:Service!}"""The location of the implementing service config file in storage""" type ImplementingServiceLocation{"""The name of the implementing service""" name:String!"""The path in storage to access the implementing service config file""" path:String!}type InternalAdminUser{role:InternalMdgAdminRole!userID:String!}type InternalIdentity implements Identity{accounts:[Account!]!asActor:Actor!email:String id:ID!name:String!}enum InternalMdgAdminRole{INTERNAL_MDG_READ_ONLY INTERNAL_MDG_SALES INTERNAL_MDG_SUPER_ADMIN INTERNAL_MDG_SUPPORT}type IntrospectionDirective{args:[IntrospectionInputValue!]!description:String locations:[IntrospectionDirectiveLocation!]!name:String!}input IntrospectionDirectiveInput{args:[IntrospectionInputValueInput!]!description:String isRepeatable:Boolean locations:[IntrospectionDirectiveLocation!]!name:String!}"""__DirectiveLocation introspection type""" enum IntrospectionDirectiveLocation{"""Location adjacent to an argument definition.""" ARGUMENT_DEFINITION """Location adjacent to an enum definition.""" ENUM """Location adjacent to an enum value definition.""" ENUM_VALUE """Location adjacent to a field.""" FIELD """Location adjacent to a field definition.""" FIELD_DEFINITION """Location adjacent to a fragment definition.""" FRAGMENT_DEFINITION """Location adjacent to a fragment spread.""" FRAGMENT_SPREAD """Location adjacent to an inline fragment.""" INLINE_FRAGMENT """Location adjacent to an input object field definition.""" INPUT_FIELD_DEFINITION """Location adjacent to an input object type definition.""" INPUT_OBJECT """Location adjacent to an interface definition.""" INTERFACE """Location adjacent to a mutation operation.""" MUTATION """Location adjacent to an object type definition.""" OBJECT """Location adjacent to a query operation.""" QUERY """Location adjacent to a scalar definition.""" SCALAR """Location adjacent to a schema definition.""" SCHEMA """Location adjacent to a subscription operation.""" SUBSCRIPTION """Location adjacent to a union definition.""" UNION """Location adjacent to a variable definition.""" VARIABLE_DEFINITION}"""Values associated with introspection result for an enum value""" type IntrospectionEnumValue{depreactionReason:String@deprecated(reason:"Use deprecationReason instead")deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""__EnumValue introspection type""" input IntrospectionEnumValueInput{deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""Values associated with introspection result for field""" type IntrospectionField{args:[IntrospectionInputValue!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionType!}"""__Field introspection type""" input IntrospectionFieldInput{args:[IntrospectionInputValueInput!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionTypeInput!}"""Values associated with introspection result for an input field""" type IntrospectionInputValue{defaultValue:String description:String name:String!type:IntrospectionType!}"""__Value introspection type""" input IntrospectionInputValueInput{defaultValue:String deprecationReason:String description:String isDeprecated:Boolean name:String!type:IntrospectionTypeInput!}type IntrospectionSchema{directives:[IntrospectionDirective!]!mutationType:IntrospectionType queryType:IntrospectionType!subscriptionType:IntrospectionType types(filter:TypeFilterConfig={includeAbstractTypes:true includeBuiltInTypes:true includeIntrospectionTypes:true}):[IntrospectionType!]!}"""__Schema introspection type""" input IntrospectionSchemaInput{description:String directives:[IntrospectionDirectiveInput!]!mutationType:IntrospectionTypeRefInput queryType:IntrospectionTypeRefInput!subscriptionType:IntrospectionTypeRefInput types:[IntrospectionTypeInput!]}"""Object containing all possible values for an introspectionType""" type IntrospectionType{"""the base kind of the type this references, ignoring lists and nullability""" baseKind:IntrospectionTypeKind description:String enumValues(includeDeprecated:Boolean=false):[IntrospectionEnumValue!]fields:[IntrospectionField!]inputFields:[IntrospectionInputValue!]interfaces:[IntrospectionType!]kind:IntrospectionTypeKind name:String ofType:IntrospectionType possibleTypes:[IntrospectionType!]"""printed representation of type, including nested nullability and list ofTypes""" printed:String!}"""__Type introspection type""" input IntrospectionTypeInput{description:String enumValues:[IntrospectionEnumValueInput!]fields:[IntrospectionFieldInput!]inputFields:[IntrospectionInputValueInput!]interfaces:[IntrospectionTypeInput!]kind:IntrospectionTypeKind!name:String ofType:IntrospectionTypeInput possibleTypes:[IntrospectionTypeInput!]specifiedByUrl:String}enum IntrospectionTypeKind{"""Indicates this type is an enum. 'enumValues' is a valid field.""" ENUM """Indicates this type is an input object. 'inputFields' is a valid field.""" INPUT_OBJECT """Indicates this type is an interface. 'fields' and 'possibleTypes' are valid
+fields""" INTERFACE """Indicates this type is a list. 'ofType' is a valid field.""" LIST """Indicates this type is a non-null. 'ofType' is a valid field.""" NON_NULL """Indicates this type is an object. 'fields' and 'interfaces' are valid fields.""" OBJECT """Indicates this type is a scalar.""" SCALAR """Indicates this type is a union. 'possibleTypes' is a valid field.""" UNION}"""Shallow __Type introspection type""" input IntrospectionTypeRefInput{kind:String name:String!}type InvalidOperation{errors:[OperationValidationError!]signature:ID!}"""Type returned by reference lookup when the reference was invalid""" type InvalidRefFormat implements Error{message:String!}type InvalidTarget implements Error{message:String!}type Invoice{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceState!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}enum InvoiceState{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}enum InvoiceStateV2{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}type InvoiceV2{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceStateV2!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}type Launch{approvedAt:Timestamp build:Build buildInput:BuildInput!completedAt:Timestamp createdAt:Timestamp!downstreamLaunches:[Launch!]!graphId:String!graphVariant:String!id:ID!isAvailable:Boolean isCompleted:Boolean isPublished:Boolean isTarget:Boolean latestSequenceStep:LaunchSequenceStep publication:SchemaTag results:[LaunchResult!]!schemaTag:SchemaTag sequence:[LaunchSequenceStep!]!shortenedID:String!status:LaunchStatus!subgraphChanges:[SubgraphChange!]supersededAt:Timestamp supersededBy:Launch upstreamLaunch:Launch}"""more result types will be supported in the future""" union LaunchResult=ChangelogLaunchResult type LaunchSequenceBuildStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCheckStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCompletedStep{completedAt:Timestamp}type LaunchSequenceInitiatedStep{startedAt:Timestamp}type LaunchSequencePublishStep{completedAt:Timestamp startedAt:Timestamp}union LaunchSequenceStep=LaunchSequenceBuildStep|LaunchSequenceCheckStep|LaunchSequenceCompletedStep|LaunchSequenceInitiatedStep|LaunchSequencePublishStep|LaunchSequenceSupersededStep type LaunchSequenceSupersededStep{completedAt:Timestamp}enum LaunchStatus{LAUNCH_COMPLETED LAUNCH_FAILED LAUNCH_INITIATED}type LinkInfo{createdAt:Timestamp!id:ID!title:String type:LinkInfoType!url:String!}enum LinkInfoType{DEVELOPER_PORTAL OTHER REPOSITORY}"""Long type""" scalar Long type MarkChangesForOperationAsSafeResult{"""Nice to have for the frontend since the Apollo cache is already watching for AffectedQuery to update.
+This might return null if no behavior changes were found for the affected operation ID.
+This is a weird situation that should never happen.""" affectedOperation:AffectedQuery message:String!success:Boolean!}type MediaUploadInfo{csrfToken:String!maxContentLength:Int!url:String!}union MoveOperationCollectionEntryResult=InvalidTarget|MoveOperationCollectionEntrySuccess|PermissionError type MoveOperationCollectionEntrySuccess{operation:OperationCollectionEntry!originCollection:OperationCollection!targetCollection:OperationCollection!}type Mutation{account(id:ID!):AccountMutation """Creates an operation collection for the given variantRefs, or make a sandbox collection without variantRefs.""" createOperationCollection(description:String editRoles:[UserPermission!]isSandbox:Boolean!isShared:Boolean!name:String!variantRefs:[ID!]):CreateOperationCollectionResult!"""Finalize a password reset with a token included in the E-mail link,
+returns the corresponding login email when successful""" finalizePasswordReset(newPassword:String!resetToken:String!):String """Mutation a graph.""" graph(id:ID!):ServiceMutation """Join an account with a token""" joinAccount(accountId:ID!joinToken:String!):Account me:IdentityMutation newAccount(companyUrl:String id:ID!):Account newService(accountId:ID!description:String hiddenFromUninvitedNonAdminAccountMembers:Boolean!=false id:ID!isDev:Boolean!=false name:String title:String):Service operationCollection(id:ID!):OperationCollectionMutation """Refresh all plans from third-party billing service""" plansRefreshBilling:Void """Report a running GraphQL server's schema.""" reportSchema("""Only sent if previously requested i.e. received ReportSchemaResult with withCoreSchema = true. This is a GraphQL schema document as a string. Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchema:String """Information about server and its schema.""" report:SchemaReport!):ReportSchemaResult """Ask for a user's password to be reset by E-mail""" resetPassword(email:String!):Void resolveAllInternalCronExecutions(group:String name:String):Void resolveInternalCronExecution(id:ID!):CronExecution service(id:ID!):ServiceMutation """Set the subscriptions for a given email""" setSubscriptions(email:String!subscriptions:[EmailCategory!]!token:String!):EmailPreferences """Set the studio settings for the current user""" setUserSettings(newSettings:UserSettingsInput):UserSettings signUp(email:String!fullName:String!password:String!referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """This is called by the form shown to users after they delete their user or organization account.""" submitPostDeletionFeedback(feedback:String!targetIdentifier:ID!targetType:DeletionTargetType!):Void """Mutation for basic engagement tracking in studio""" track(event:EventEnum!graphID:String!graphVariant:String!="current"):Void """Rover session tracking. Reserved to https://rover.apollo.dev/telemetry (https://github.com/apollographql/orbiter).""" trackRoverSession(anonymousId:ID!arguments:[RoverArgumentInput!]!ci:String command:String!cwdHash:SHA256!os:String!remoteUrlHash:SHA256!sessionId:ID!version:String!):Void """Unsubscribe a given email from all emails""" unsubscribeFromAll(email:String!token:String!):EmailPreferences user(id:ID!):UserMutation}type NamedIntrospectionArg{description:String name:String}type NamedIntrospectionArgNoDescription{name:String}"""The shared fields for a named introspection type. Currently this is returned for the
+top level value affected by a change. In the future, we may update this
+type to be an interface, which is extended by the more specific types:
+scalar, object, input object, union, interface, and enum
+
+For an in-depth look at where these types come from, see:
+https://github.com/DefinitelyTyped/DefinitelyTyped/blob/659eb50d3/types/graphql/utilities/introspectionQuery.d.ts#L31-L37""" type NamedIntrospectionType{description:String kind:IntrospectionTypeKind name:String}type NamedIntrospectionTypeNoDescription{name:String}"""Introspection values that can be children of other types for changes, such
+as input fields, objects in interfaces, enum values. In the future, this
+value could become an interface to allow fields specific to the types
+returned.""" type NamedIntrospectionValue{description:String name:String printedType:String}type NamedIntrospectionValueNoDescription{name:String printedType:String}"""A non-federated service for a monolithic graph.""" type NonFederatedImplementingService{"""Timestamp of when this implementing service was created.""" createdAt:Timestamp!"""Identifies which graph this non-implementing service belongs to.
+Formerly known as "service_id".""" graphID:String!"""Specifies which variant of a graph this implementing service belongs to".
+Formerly known as "tag".""" graphVariant:String!}type NotFoundError implements Error{message:String!}"""Arbitrary JSON object""" scalar Object type OdysseyCertification{certificationId:String!earnedAt:Timestamp!id:ID!owner:OdysseyCertificationOwner}type OdysseyCertificationOwner{fullName:String!id:ID!}type OdysseyCourse{completedAt:Timestamp enrolledAt:Timestamp id:ID!}input OdysseyCourseInput{completedAt:Timestamp courseId:String!}type OdysseyTask{completedAt:Timestamp id:ID!value:String}input OdysseyTaskInput{completedAt:Timestamp taskId:String!value:String}type Operation{id:ID!name:String signature:String truncated:Boolean!}type OperationAcceptedChange{acceptedAt:Timestamp!acceptedBy:Identity!change:StoredApprovedChange!checkID:ID!graphID:ID!id:ID!operationID:String!}"""Columns of OperationCheckStats.""" enum OperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type OperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in OperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input OperationCheckStatsFilter{and:[OperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:OperationCheckStatsFilterIn not:OperationCheckStatsFilter or:[OperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in OperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input OperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type OperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input OperationCheckStatsOrderBySpec{column:OperationCheckStatsColumn!direction:Ordering!}type OperationCheckStatsRecord{"""Dimensions of OperationCheckStats that can be grouped by.""" groupBy:OperationCheckStatsDimensions!"""Metrics of OperationCheckStats that can be aggregated over.""" metrics:OperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type OperationCollection{createdAt:Timestamp!createdBy:Identity description:String """If a user has any of these roles, they will be able to edit this
+collection. This will be null if and only if \`isShared\` is false""" editRoles:[UserPermission!]id:ID!isFavorite:Boolean!isSandbox:Boolean!isShared:Boolean!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!operation(id:ID!):OperationCollectionEntryResult operations:[OperationCollectionEntry!]!""" Permissions the current user has for this collection""" permissions:OperationCollectionPermissions!variants:[GraphVariant!]!}type OperationCollectionEntry{collection:OperationCollection!createdAt:Timestamp!createdBy:Identity currentOperationRevision:OperationCollectionEntryState!id:ID!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!orderingIndex:String!}type OperationCollectionEntryMutation{moveToCollection(collectionId:ID!lowerOrderingBound:String upperOrderingBound:String):MoveOperationCollectionEntryResult!reorderEntry(lowerOrderingBound:String upperOrderingBound:String):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionEntryResult updateValues(operationInput:OperationCollectionEntryStateInput!):UpdateOperationCollectionEntryResult}union OperationCollectionEntryMutationResult=NotFoundError|OperationCollectionEntryMutation|PermissionError union OperationCollectionEntryResult=NotFoundError|OperationCollectionEntry type OperationCollectionEntryState{body:String!createdAt:Timestamp!createdBy:Identity headers:[OperationHeader!]variables:String}input OperationCollectionEntryStateInput{body:String!headers:[OperationHeaderInput!]""" I'm assuming this is non null""" variables:String}type OperationCollectionMutation{addOperation(name:String!operationInput:OperationCollectionEntryStateInput!):AddOperationCollectionEntryResult addToVariant(variantRef:ID!):AddOperationCollectionToVariantResult!@deprecated(reason:"Will throw NotImplemented")delete:DeleteOperationCollectionResult deleteOperation(id:ID!):RemoveOperationCollectionEntryResult duplicateCollection(description:String isSandbox:Boolean!isShared:Boolean!name:String!variantRef:ID):DuplicateOperationCollectionResult!operation(id:ID!):OperationCollectionEntryMutationResult removeFromVariant(variantRef:ID!):RemoveOperationCollectionFromVariantResult!@deprecated(reason:"Will throw NotImplemented")updateDescription(description:String):UpdateOperationCollectionResult updateEditRoles(editRoles:[UserPermission!]!):UpdateOperationCollectionResult updateIsFavorite(isFavorite:Boolean!):UpdateOperationCollectionResult updateIsShared(isShared:Boolean!):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionResult}type OperationCollectionPermissions{canEditOperations:Boolean!canManage:Boolean!canReadOperations:Boolean!}union OperationCollectionResult=NotFoundError|OperationCollection|PermissionError type OperationDocument{"""Operation document body""" body:String!"""Operation name""" name:String}input OperationDocumentInput{"""Operation document body""" body:String!"""Operation name""" name:String}type OperationHeader{name:String!value:String!}input OperationHeaderInput{name:String!value:String!}"""Operation name filter configuration for a graph.""" type OperationNameFilter{"""name of the operation by the user and reported alongside metrics""" name:String!}"""Options to filter by operation name.""" input OperationNameFilterInput{"""name of the operation set by the user and reported alongside metrics""" name:String!}type OperationValidationError{message:String!}type OperationsCheckResult{"""Operations affected by all changes in diff""" affectedQueries:[AffectedQuery!]"""Summary/counts for all changes in diff""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations""" changes:[Change!]!"""Indication of the success of the change, either failure, warning, or notice.""" checkSeverity:ChangeSeverity!"""The variant that was used as a base to check against""" checkedVariant:GraphVariant!createdAt:Timestamp!id:ID!"""Number of affected query operations that are neither marked as SAFE or IGNORED""" numberOfAffectedOperations:Int!"""Number of operations that were validated during schema diff""" numberOfCheckedOperations:Int!workflowTask:OperationsCheckTask!}type OperationsCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the check.""" result:OperationsCheckResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}enum Ordering{ASCENDING DESCENDING}"""A reusable invite link for an organization.""" type OrganizationInviteLink{createdAt:Timestamp!"""A joinToken that can be passed to Mutation.joinAccount to join the organization.""" joinToken:String!"""The role that the user will receive if they join the organization with this link.""" role:UserPermission!}type OrganizationSSO{defaultRole:UserPermission!idpid:ID!provider:OrganizationSSOProvider!}enum OrganizationSSOProvider{PINGONE}"""PagerDuty notification channel""" type PagerDutyChannel implements Channel{id:ID!name:String!routingKey:String!subscriptions:[ChannelSubscription!]!}"""PagerDuty notification channel parameters""" input PagerDutyChannelInput{name:String routingKey:String!}"""Schema for a subgraph with associated metadata""" type PartialSchema{"""Timestamp for when the partial schema was created""" createdAt:Timestamp!"""If this sdl is currently actively composed in the gateway, this is true""" isLive:Boolean!"""The GraphQL document for a subgraph schema.""" sdl:String!"""The path of deep storage to find the raw enriched partial schema file""" sdlPath:String!}"""Input for registering a partial schema to an implementing service.
+One of the fields must be specified (validated server-side).
+
+If a new partialSchemaSDL is passed in, this operation will store it before
+creating the association.
+
+If both the sdl and hash are specified, an error will be thrown if the provided
+hash doesn't match our hash of the sdl contents. If the sdl field is specified,
+the hash does not need to be and will be computed server-side.""" input PartialSchemaInput{"""Hash of the partial schema to associate; error is thrown if only the hash is
+specified and the hash has not been seen before""" hash:String """Contents of the partial schema in SDL syntax, but may reference types
+that aren't defined in this document""" sdl:String}type PermissionError implements Error{message:String!}type PromoteSchemaError{code:PromoteSchemaErrorCode!message:String!}enum PromoteSchemaErrorCode{CANNOT_PROMOTE_SCHEMA_FOR_FEDERATED_GRAPH}type PromoteSchemaResponse{code:PromoteSchemaResponseCode!tag:SchemaTag!}enum PromoteSchemaResponseCode{NO_CHANGES_DETECTED PROMOTION_SUCCESS}union PromoteSchemaResponseOrError=PromoteSchemaError|PromoteSchemaResponse type Protobuf{json:String!object:Object!raw:Blob!text:String!}type Query{"""Account by ID""" account(id:ID!):Account """Retrieve account by billing provider identifier""" accountByBillingCode(id:ID!):Account """Whether an account ID is available for mutation{newAccount(id:)}""" accountIDAvailable(id:ID!):Boolean!"""All accounts""" allAccounts(search:String tier:BillingPlanTier):[Account!]"""All available plans""" allPlans:[BillingPlan!]!allPublicVariants:[GraphVariant!]"""All services""" allServices(search:String):[Service!]"""All timezones with their offsets from UTC""" allTimezoneOffsets:[TimezoneOffset!]!"""All users""" allUsers(search:String):[User!]"""Look up a plan by ID""" billingPlan(id:ID):BillingPlanV2 """All available plans""" billingPlans:[BillingPlanV2!]!"""If this is true, the user is an Apollo administrator who can ignore restrictions based purely on billing plan.""" canBypassPlanRestrictions:Boolean!diffSchemas(baseSchema:String!nextSchema:String!):[Change!]!"""Get the unsubscribe settings for a given email.""" emailPreferences(email:String!token:String!):EmailPreferences experimentalFeatures:GlobalExperimentalFeatures!"""Address of the Studio frontend.""" frontendUrlRoot:String!"""Access a graph by ID.""" graph(id:ID!):Service internalActiveCronJobs:[CronJob!]!internalAdminUsers:[InternalAdminUser!]internalUnresolvedCronExecutionFailures:[CronExecution!]!"""User or graph querying the API, null if not authenticated.""" me:Identity odysseyCertification(id:ID!):OdysseyCertification operationCollection(id:ID!):OperationCollectionResult!operationCollectionEntries(collectionEntryIds:[ID!]!):[OperationCollectionEntry!]!"""Access an organization by ID.""" organization(id:ID!):Account """Look up a plan by ID""" plan(id:ID):BillingPlan """A list of public variants that have been selected to be shown on our Graph Directory.""" publiclyListedVariants:[GraphVariant!]"""Service by ID""" service(id:ID!):Service """Query statistics across all services. For admins only; normal users must go through AccountsStatsWindow or ServiceStatsWindow.""" stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):StatsWindow!"""Get the studio settings for the current user""" studioSettings:UserSettings """The plan started by AccountMutation.startTeamSubscription""" teamBillingPlan(billingPeriod:BillingPeriod!):BillingPlanV2!"""The plan started by AccountMutation.startTeamSubscription""" teamPlan(billingPeriod:BillingPeriod!):BillingPlan!"""Schema transformation for the Apollo platform API. Renames types. Internal to Apollo.""" transformSchemaForPlatformApi(baseSchema:GraphQLDocument!):GraphQLDocument """The plan started by AccountMutation.startTrial""" trialBillingPlan:BillingPlanV2!"""The plan started by AccountMutation.startTrial""" trialPlan:BillingPlan!"""User by ID""" user(id:ID!):User """Access a variant by reference of the form `graphID@variantName`, or `graphID` for the default `current` variant.
+Returns null when the graph or variant do not exist, or when the graph cannot be accessed.
+Note that we can return more types implementing Error in the future.""" variant(ref:ID!):GraphVariantLookup}"""query documents to validate against""" input QueryDocumentInput{document:String}type QueryPlan{json:String!object:Object!text:String!}"""Columns of QueryStats.""" enum QueryStatsColumn{ACCOUNT_ID CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type QueryStatsDimensions{accountId:ID clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in QueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input QueryStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[QueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:QueryStatsFilterIn not:QueryStatsFilter or:[QueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in QueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input QueryStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type QueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input QueryStatsOrderBySpec{column:QueryStatsColumn!direction:Ordering!}type QueryStatsRecord{"""Dimensions of QueryStats that can be grouped by.""" groupBy:QueryStatsDimensions!"""Metrics of QueryStats that can be aggregated over.""" metrics:QueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Query Trigger""" type QueryTrigger implements ChannelSubscription{channels:[Channel!]!comparisonOperator:ComparisonOperator!enabled:Boolean!excludedOperationNames:[String!]!id:ID!metric:QueryTriggerMetric!operationNames:[String!]!percentile:Float scope:QueryTriggerScope!serviceId:String!state:QueryTriggerState!threshold:Float!variant:String window:QueryTriggerWindow!}"""Query trigger""" input QueryTriggerInput{channelIds:[String!]comparisonOperator:ComparisonOperator!enabled:Boolean excludedOperationNames:[String!]metric:QueryTriggerMetric!operationNames:[String!]percentile:Float scope:QueryTriggerScope threshold:Float!variant:String window:QueryTriggerWindow!}enum QueryTriggerMetric{"""Number of requests within the window that resulted in an error. Ignores `percentile`.""" ERROR_COUNT """Number of error requests divided by total number of requests. Ignores `percentile`.""" ERROR_PERCENTAGE """Number of requests within the window. Ignores `percentile`.""" REQUEST_COUNT """Request latency in ms. Requires `percentile`.""" REQUEST_SERVICE_TIME}enum QueryTriggerScope{ALL ANY UNRECOGNIZED}"""Query trigger state""" type QueryTriggerState{evaluatedAt:Timestamp!lastTriggeredAt:Timestamp operations:[QueryTriggerStateOperation!]!triggered:Boolean!}type QueryTriggerStateOperation{count:Long!operation:String!triggered:Boolean!value:Float!}enum QueryTriggerWindow{FIFTEEN_MINUTES FIVE_MINUTES ONE_MINUTE UNRECOGNIZED}"""The documentation for a graph variant, as display in Studio.""" type Readme{"""Content of the document.""" content:String!id:ID!"""Last time the document was updated.""" lastUpdatedAt:Timestamp!"""Identity of who updated the document last.""" lastUpdatedBy:Identity}type RegisterOperationsMutationResponse{invalidOperations:[InvalidOperation!]newOperations:[RegisteredOperation!]registrationSuccess:Boolean!}input RegisteredClientIdentityInput{identifier:String!name:String!version:String}type RegisteredOperation{signature:ID!}input RegisteredOperationInput{document:String metadata:RegisteredOperationMetadataInput signature:ID!}input RegisteredOperationMetadataInput{"""This will be used to link existing records in Engine to a new ID.""" engineSignature:String}type RegistryApiKey{keyName:String token:String!}type RegistryStatsWindow{schemaCheckStats:[AccountChecksStatsRecord!]!schemaPublishStats:[AccountPublishesStatsRecord!]!}type RegistrySubscription implements ChannelSubscription{channel:Channel channels:[Channel!]!@deprecated(reason:"Use channels list instead")createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!options:SubscriptionOptions!variant:String}type RelaunchComplete{latestLaunch:Launch!updated:Boolean!}type RelaunchError{message:String!}union RelaunchResult=RelaunchComplete|RelaunchError union RemoveOperationCollectionEntryResult=OperationCollection|PermissionError union RemoveOperationCollectionFromVariantResult=GraphVariant|NotFoundError|PermissionError|ValidationError union ReorderOperationCollectionResult=OperationCollection|PermissionError type ReportSchemaError implements ReportSchemaResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withCoreSchema:Boolean!}enum ReportSchemaErrorCode{BOOT_ID_IS_NOT_VALID_UUID BOOT_ID_IS_REQUIRED CORE_SCHEMA_HASH_IS_NOT_SCHEMA_SHA256 CORE_SCHEMA_HASH_IS_REQUIRED CORE_SCHEMA_HASH_IS_TOO_LONG EXECUTABLE_SCHEMA_ID_IS_NOT_SCHEMA_SHA256 EXECUTABLE_SCHEMA_ID_IS_REQUIRED EXECUTABLE_SCHEMA_ID_IS_TOO_LONG GRAPH_REF_INVALID_FORMAT GRAPH_REF_IS_REQUIRED GRAPH_VARIANT_DOES_NOT_MATCH_REGEX GRAPH_VARIANT_IS_REQUIRED LIBRARY_VERSION_IS_TOO_LONG PLATFORM_IS_TOO_LONG RUNTIME_VERSION_IS_TOO_LONG SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID SERVER_ID_IS_TOO_LONG USER_VERSION_IS_TOO_LONG}type ReportSchemaResponse implements ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}interface ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}type ReportServerInfoError implements ReportServerInfoResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withExecutableSchema:Boolean!}type ReportServerInfoResponse implements ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}interface ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}enum Resolution{R1D R1H R1M R5M R6H R15M}enum ResponseHints{NONE SAMPLE_RESPONSES SUBGRAPHS TIMINGS TRACE_TIMINGS}type RoleOverride{graph:Service!lastUpdatedAt:Timestamp!role:UserPermission!user:User!}input RoverArgumentInput{key:String!value:Object}"""SHA-256 hash, represented in lowercase hexadecimal""" scalar SHA256 type ScheduledSummary implements ChannelSubscription{channel:Channel@deprecated(reason:"Use channels list instead")channels:[Channel!]!enabled:Boolean!id:ID!timezone:String!variant:String!}"""A GraphQL schema document, which may optionally map back to context with which the schema was ingested.""" type Schema{createTemporaryURL(expiresInSeconds:Int!=86400):TemporaryURL """The timestamp of initial ingestion of a schema to a graph.""" createdAt:Timestamp!"""The raw GraphQL document for the schema in question""" document:GraphQLDocument!"""The number of fields; this includes user defined fields only, excluding built-in types and fields""" fieldCount:Int!gitContext:GitContext """The hex representation of the SHA256 of the GraphQL document.""" hash:ID!introspection:IntrospectionSchema!"""The number of types; this includes user defined types only, excluding built-in types""" typeCount:Int!}"""Represents an error from running schema composition on a list of subgraph definitions.""" type SchemaCompositionError{"""A machine-readable error code.""" code:String """Affected locations.""" locations:[SourceLocation]!"""A human-readable locations.""" message:String!}"""The difference between two schemas, as usually computed during a schema check.""" type SchemaDiff{"""Clients affected by all changes in the diff.""" affectedClients:[AffectedClient!]@deprecated(reason:"Unsupported.")"""Operations affected by all changes in the diff.""" affectedQueries:[AffectedQuery!]"""Numeric summary of all changes in the diff.""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations.""" changes:[Change!]!"""Number of affected query operations that are neither marked as safe or ignored.""" numberOfAffectedOperations:Int!"""Number of operations that were validated during the check.""" numberOfCheckedOperations:Int """Indication of the success of the change; either failure, warning, or notice.""" severity:ChangeSeverity!"""The tag against which this diff was created""" tag:String type:ChangeType!@deprecated(reason:"use severity instead")"""Configuration of validation""" validationConfig:SchemaDiffValidationConfig}type SchemaDiffValidationConfig{"""Clients to ignore during validation.""" excludedClients:[ClientInfoFilterOutput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilter]"""delta in seconds from current time that determines the start of the window
+for reported metrics included in a schema diff. A day window from the present
+day would have a `from` value of -86400. In rare cases, this could be an ISO
+timestamp if the user passed one in on diff creation""" from:Timestamp """Operation IDs to ignore during validation.""" ignoredOperations:[ID!]"""Variants to include during validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int """Number of requests within the window for a query to be considered, relative to
+total request count. Expected values are between 0 and 0.05 (minimum 5% of
+total request volume)""" queryCountThresholdPercentage:Float """delta in seconds from current time that determines the end of the
+window for reported metrics included in a schema diff. A day window
+from the present day would have a `to` value of -0. In rare
+cases, this could be an ISO timestamp if the user passed one in on diff
+creation""" to:Timestamp}type SchemaPublishSubscription implements ChannelSubscription{channels:[Channel!]!createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!variant:String}input SchemaReport{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" bootId:String!"""The hex SHA256 hash of the schema being reported. Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchemaHash:String!"""The graph ref (eg, 'id@variant')""" graphRef:String!"""The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""A specific publication of a graph variant.""" type SchemaTag{"""The result of composition, including either a supergraph schema or errors,
+executed during this publication. Only available with managed federation.""" compositionResult:CompositionResult createdAt:Timestamp!"""Differences with the schema from the previous successful publication.""" diffToPrevious:SchemaDiff gitContext:GitContext """List of previously uploaded SchemaTags under the same tag name, starting with
+the selected published schema record. Sorted in reverse chronological order
+by creation date (newest publish first).
+
+Note: This does not include the history of checked schemas""" history(includeUnchanged:Boolean!=true limit:Int!=3 offset:Int=0):[SchemaTag!]!"""Number of tagged schemas created under the same tag name.
+Also represents the maximum size of the history's limit argument.""" historyLength:Int!"""Number of schemas tagged prior to this one under the same tag name, its position
+in the tag history.""" historyOrder:Int!"""The identifier for this specific publication.""" id:ID!"""Time of publication.""" publishedAt:Timestamp!"""The Identity that published this schema and their client info, or null if this isn't
+a publish. Sub-fields may be null if they weren't recorded.""" publishedBy:IdentityAndClientInfo """Indicates the schemaTag of the schema's original upload, null if this is the
+first upload of the schema.""" reversionFrom:SchemaTag """The published schema.""" schema:Schema!slackNotificationBody(graphDisplayName:String!):String tag:String!@deprecated(reason:"Please use variant { name } instead")"""The graph variant this belongs to.""" variant:GraphVariant!webhookNotificationBody:String!}"""How many seats of the given types does an organization have (regardless of plan type)?""" type Seats{"""How many members that are free in this organization.""" free:Int!"""How many members that are not free in this organization.""" fullPrice:Int!}type SemanticChange{"""Target arg of change made.""" argNode:NamedIntrospectionArg """Node related to the top level node that was changed, such as a field in an object,
+a value in an enum or the object of an interface""" childNode:NamedIntrospectionValue """Semantic metadata about the type of change""" definition:ChangeDefinition!"""Top level node affected by the change""" parentNode:NamedIntrospectionType}"""A graph in Apollo Studio represents a graph in your organization.
+Each graph has one or more variants, which correspond to the different environments where that graph runs (such as staging and production).
+Each variant has its own GraphQL schema, which means schemas can differ between environments.""" type Service implements Identity{"""Organization that this graph belongs to.""" account:Account accountId:ID apiKeys:[GraphApiKey!]"""A view of the identity as an Actor type.""" asActor:Actor!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request
+with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the
+browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the
+filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to
+MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the service's avatar. Note that CORS is not enabled for these URLs. The size
+argument is used for bandwidth reduction, and should be the size of the image as displayed in the
+application. Apollo's media server will downscale larger images to at least the requested size,
+but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String """Get available notification endpoints""" channels(channelIds:[ID!]):[Channel!]"""Get check configuration for this graph.""" checkConfiguration:CheckConfiguration """Get a check workflow for this graph by its ID""" checkWorkflow(id:ID!):CheckWorkflow """Get check workflows for this graph ordered by creation time, most recent first.""" checkWorkflows(filter:CheckFilterInput limit:Int!=100):[CheckWorkflow!]!"""List of options available for filtering checks for this graph by author.
+If a filter is passed, constrains results to match the filter.""" checksAuthorOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by branch.
+If a filter is passed, constrains results to match the filter.""" checksBranchOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by subgraph name.
+If a filter is passed, constrains results to match the filter.""" checksSubgraphOptions(filter:CheckFilterInput):[String!]!"""Given a graphCompositionID, return the results of composition. This can represent either a validation or a publish.""" compositionResultById(id:ID!):CompositionResult createdAt:Timestamp!createdBy:Identity datadogMetricsConfig:DatadogMetricsConfig deletedAt:Timestamp description:String devGraphOwner:User """Get a GraphQL document by hash""" document(hash:SHA256):GraphQLDocument """When this is true, this graph will be hidden from non-admin members of the org who haven't been explicitly assigned a
+role on this graph.""" hiddenFromUninvitedNonAdminAccountMembers:Boolean!"""Globally unique identifier for this graph.""" id:ID!"""List of subgraphs that comprise a graph. A non-federated graph should have a single implementing service.
+Set includeDeleted to see deleted subgraphs.""" implementingServices(graphVariant:String!includeDeleted:Boolean):GraphImplementors lastReportedAt(graphVariant:String):Timestamp """Current identity, null if not authenticated.""" me:Identity """The composition result that was most recently published to a graph variant.""" mostRecentCompositionPublish(graphVariant:String!):CompositionPublishResult """Permissions of the current user in this graph.""" myRole:UserPermission """Name of this graph. Note that this field is deprecated.""" name:String!@deprecated(reason:"Use Service.title")operation(id:ID!):Operation """Gets the operations and their approved changes for this graph, checkID, and operationID.""" operationsAcceptedChanges(checkID:ID!operationID:String!):[OperationAcceptedChange!]!"""Get an operations check result for a specific check ID""" operationsCheck(checkID:ID!):OperationsCheckResult """Get query triggers for a given variant. If variant is null all the triggers for this service will be gotten.""" queryTriggers(graphVariant:String operationNames:[String!]):[QueryTrigger!]readme:Readme """Registry specific stats for this graph.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """Whether registry subscriptions (with any options) are enabled. If variant is not passed, returns true if configuration is present for any variant""" registrySubscriptionsEnabled(graphVariant:String):Boolean!@deprecated(reason:"This field will be removed")reportingEnabled:Boolean!"""The list of members that can access this graph, accounting for graph role overrides""" roleOverrides:[RoleOverride!]"""Which permissions the current user has for interacting with this graph""" roles:ServiceRoles scheduledSummaries:[ScheduledSummary!]!"""Get a schema by hash or current tag""" schema(hash:ID tag:String):Schema """The current publish associated to a given variant (with 'tag' as the variant name).""" schemaTag(tag:String!):SchemaTag schemaTagById(id:ID!):SchemaTag """Get schema tags, with optional filtering to a set of tags. Always sorted by creation
+date in reverse chronological order.""" schemaTags(tags:[String!]):[SchemaTag!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow!@deprecated(reason:"use Service.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow """Generate a test schema publish notification body""" testSchemaPublishBody(variant:String!):String!"""Name of this graph.""" title:String!trace(id:ID!):Trace traceStorageEnabled:Boolean!"""A particular variant often representing a live traffic environment (such as "dev", "staging", or "prod").
+Each variant can represent a specific URL or destination to query at, analytics, and its own schema history.
+Pass in a name to get a specific variant. Use `Graph.variants` to get a list of variants.""" variant(name:String!):GraphVariant """The list of variants that exist for this graph""" variants:[GraphVariant!]!}"""Columns of ServiceBillingUsageStats.""" enum ServiceBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG TIMESTAMP}type ServiceBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilter{and:[ServiceBillingUsageStatsFilter!]in:ServiceBillingUsageStatsFilterIn not:ServiceBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[ServiceBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceBillingUsageStatsMetrics{operationCount:Long!}input ServiceBillingUsageStatsOrderBySpec{column:ServiceBillingUsageStatsColumn!direction:Ordering!}type ServiceBillingUsageStatsRecord{"""Dimensions of ServiceBillingUsageStats that can be grouped by.""" groupBy:ServiceBillingUsageStatsDimensions!"""Metrics of ServiceBillingUsageStats that can be aggregated over.""" metrics:ServiceBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceEdgeServerInfos.""" enum ServiceEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID TIMESTAMP USER_VERSION}type ServiceEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilter{and:[ServiceEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:ServiceEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:ServiceEdgeServerInfosFilter or:[ServiceEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input ServiceEdgeServerInfosOrderBySpec{column:ServiceEdgeServerInfosColumn!direction:Ordering!}type ServiceEdgeServerInfosRecord{"""Dimensions of ServiceEdgeServerInfos that can be grouped by.""" groupBy:ServiceEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceErrorStats.""" enum ServiceErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilter{and:[ServiceErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceErrorStatsFilterIn not:ServiceErrorStatsFilter or:[ServiceErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ServiceErrorStatsOrderBySpec{column:ServiceErrorStatsColumn!direction:Ordering!}type ServiceErrorStatsRecord{"""Dimensions of ServiceErrorStats that can be grouped by.""" groupBy:ServiceErrorStatsDimensions!"""Metrics of ServiceErrorStats that can be aggregated over.""" metrics:ServiceErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldLatencies.""" enum ServiceFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilter{and:[ServiceFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldLatenciesFilterIn not:ServiceFieldLatenciesFilter or:[ServiceFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. 
To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input ServiceFieldLatenciesOrderBySpec{column:ServiceFieldLatenciesColumn!direction:Ordering!}type ServiceFieldLatenciesRecord{"""Dimensions of ServiceFieldLatencies that can be grouped by.""" groupBy:ServiceFieldLatenciesDimensions!"""Metrics of ServiceFieldLatencies that can be aggregated over.""" metrics:ServiceFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldUsage.""" enum ServiceFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilter{and:[ServiceFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldUsageFilterIn not:ServiceFieldUsageFilter or:[ServiceFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input ServiceFieldUsageOrderBySpec{column:ServiceFieldUsageColumn!direction:Ordering!}type ServiceFieldUsageRecord{"""Dimensions of ServiceFieldUsage that can be grouped by.""" groupBy:ServiceFieldUsageDimensions!"""Metrics of ServiceFieldUsage that can be aggregated over.""" metrics:ServiceFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Mutations to a graph.""" type ServiceMutation{"""Check a proposed subgraph schema change.
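The *Filter / *FilterIn inputs above all follow the same convention: bare dimension fields are equality checks, in: matches against a list (a literal null in the list matches rows whose dimension is null), and and / or / not combine sub-filters. A sketch of a field-usage query using those conventions; the service(id:) entry point and the statsWindow(from:, to:) connector are defined outside this hunk and are assumed here.

query FieldUsage($graphId: ID!, $from: Timestamp!, $to: Timestamp!) {
  service(id: $graphId) {                  # assumed entry point
    statsWindow(from: $from, to: $to) {    # assumed connector to ServiceStatsWindow
      fieldUsage(
        limit: 100
        filter: {
          and: [
            { parentType: "Query" }                      # equality check
            { in: { schemaTag: ["current", null] } }     # "current" or untagged rows
            { not: { in: { clientName: [null] } } }      # clientName must be set
          ]
        }
      ) {
        groupBy { parentType fieldName clientName }
        metrics { executionCount referencingOperationCount }
        timestamp
      }
    }
  }
}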
+If the proposal composes successfully, perform a usage check for the resulting schema.""" checkPartialSchema("""Deprecated and ignored.""" frontend:String gitContext:GitContextInput """Specifies which variant of a graph this mutation operates on.""" graphVariant:String!historicParameters:HistoricQueryParameters """Name of the implementing service to validate the partial schema against""" implementingServiceName:String!"""If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """The partial schema to validate against an implementing service""" partialSchema:PartialSchemaInput!"""Whether to use the maximum retention for historical validation. This only takes
+effect if historicParameters is null.""" useMaximumRetention:Boolean):CheckPartialSchemaResult!"""Checks a proposed schema against the schema that has been published to
+a particular variant, using metrics corresponding to `historicParameters`.
+Callers can set `historicParameters` directly or rely on defaults set in the
+graph's check configuration (7 days by default).
+If they do not set `historicParameters` but set `useMaximumRetention`,
+validation will use the maximum retention the graph has access to.""" checkSchema(baseSchemaTag:String="current" """Deprecated and ignored.""" frontend:String gitContext:GitContextInput historicParameters:HistoricQueryParameters """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """Only one of proposedSchema, proposedSchemaDocument, and proposedSchemaHash
+may be specified""" proposedSchema:IntrospectionSchemaInput proposedSchemaDocument:String proposedSchemaHash:String useMaximumRetention:Boolean):CheckSchemaResult!"""Make changes to a check workflow.""" checkWorkflow(id:ID!):CheckWorkflowMutation createCompositionStatusSubscription("""ID of Slack channel for registry notification.""" channelID:ID!"""Variant to notify on.""" variant:String!):SchemaPublishSubscription!createSchemaPublishSubscription("""ID of Slack channel for registry notification.""" channelID:ID!"""Variant to notify on.""" variant:String!):SchemaPublishSubscription!"""Soft delete a graph. Data associated with the graph is not permanently deleted; Apollo support can undo.""" delete:Void """Delete the service's avatar. Requires Service.roles.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Delete an existing channel""" deleteChannel(id:ID!):Boolean!"""Delete an existing query trigger""" deleteQueryTrigger(id:ID!):Boolean!"""Deletes this service's current subscriptions specific to the ID, returns true if it existed""" deleteRegistrySubscription(id:ID!):Boolean!"""Deletes this service's current registry subscription(s) specific to its graph variant,
+returns a list of subscription IDs that were deleted.""" deleteRegistrySubscriptions(variant:String!):[ID!]!deleteScheduledSummary(id:ID!):Boolean!"""Delete a variant by name.""" deleteSchemaTag(tag:String!):DeleteSchemaTagResult!"""Given a UTC timestamp, delete all traces associated with this Service, on that corresponding day. If a timestamp to is provided, deletes all days inclusive.""" deleteTraces(from:Timestamp!to:Timestamp):Void disableDatadogForwardingLegacyMetricNames:Service """Hard delete a graph and all data associated with it. Its ID cannot be reused.""" hardDelete:Void id:ID!@deprecated(reason:"Use service.id")"""Ignore an operation in future checks;
+changes affecting it will be tracked,
+but won't affect the outcome of the check.
+Returns true if the operation is newly ignored,
+false if it already was.""" ignoreOperationsInChecks(ids:[ID!]!):IgnoreOperationsInChecksResult """Mark the changeset that affects an operation in a given check instance as safe.
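A sketch of the checkSchema mutation described above, submitting a proposed schema document and relying on useMaximumRetention because historicParameters is omitted. The Mutation-side entry point into ServiceMutation and the fields of CheckSchemaResult are defined outside this hunk, so only __typename is selected.

mutation ProposeSchemaCheck($graphId: ID!, $sdl: String!) {
  service(id: $graphId) {                  # assumed Mutation-side entry point
    checkSchema(
      baseSchemaTag: "current"
      proposedSchemaDocument: $sdl
      useMaximumRetention: true            # only takes effect because historicParameters is omitted
    ) {
      __typename                           # CheckSchemaResult fields are defined outside this hunk
    }
  }
}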
+Note that only operations marked as behavior changes are allowed to be marked as safe.""" markChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to accept changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!newKey(keyName:String role:UserPermission!=GRAPH_ADMIN):GraphApiKey!"""Adds an override to the given users permission for this graph""" overrideUserPermission(permission:UserPermission userID:ID!):Service """Returns a preview of the Core and API schema contracts derived from a source variant and a set of filter configurations""" previewContractVariant(filterConfig:FilterConfigInput!sourceVariant:String!):ContractVariantPreviewResult!@deprecated(reason:"use GraphVariant.contractPreview instead")"""Promote the schema with the given SHA-256 hash to active for the given variant/tag.""" promoteSchema(graphVariant:String!historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false sha256:SHA256!):PromoteSchemaResponseOrError!"""Publish to a subgraph. If composition is successful, this will update running routers.""" publishSubgraph(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult registerOperationsWithResponse(clientIdentity:RegisteredClientIdentityInput gitContext:GitContextInput """Specifies which variant of a graph these operations belong to.
+Formerly known as "tag"
+Defaults to "current"
+""" graphVariant:String!="current" manifestVersion:Int operations:[RegisteredOperationInput!]!):RegisterOperationsMutationResponse """Removes a subgraph. If composition is successful, this will update running routers.""" removeImplementingServiceAndTriggerComposition("""Do not remove the service, but recompose without it and report any errors.""" dryRun:Boolean!=false graphVariant:String!name:String!):CompositionAndRemoveResult!removeKey("""API key ID""" id:ID):Void renameKey(id:ID!newKeyName:String):GraphApiKey reportServerInfo("""Only sent if previously requested i.e. received ReportServerInfoResult with withExecutableSchema = true. An executable schema is a schema document that describes the full GraphQL schema that an external client could execute queries against. This must be a valid GraphQL schema document, as per the GraphQL specification: https://spec.graphql.org/""" executableSchema:String """Information about the edge server, see descriptions for individual fields.""" info:EdgeServerInfo!):ReportServerInfoResult@deprecated(reason:"use Mutation.reportSchema instead")service:Service!"""Store a given schema document. This schema will be attached to the graph but
+not be associated with any variant. On success, returns the schema hash.""" storeSchemaDocument(schemaDocument:String!):StoreSchemaResponseOrError!"""Test Slack notification channel""" testSlackChannel(id:ID!notification:SlackNotificationInput!):Void testSubscriptionForChannel(channelID:ID!subscriptionID:ID!):String!transfer(to:String!):Service triggerRepublish(graphVariant:String!):Void undelete:Service """Revert the effects of ignoreOperation.
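For example, storeSchemaDocument returns the StoreSchemaResponseOrError union (defined further down in this file), so callers select on both members; the sha256 it returns is what promoteSchema expects. The service(id:) entry point is an assumption, not part of this hunk.

mutation StoreSchema($graphId: ID!, $sdl: String!) {
  service(id: $graphId) {                        # assumed entry point
    storeSchemaDocument(schemaDocument: $sdl) {
      ... on StoreSchemaResponse { sha256 }      # pass this hash to promoteSchema
      ... on StoreSchemaError { code message }
    }
  }
}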
+Returns true if the operation is no longer ignored,
+false if it wasn't.""" unignoreOperationsInChecks(ids:[ID!]!):UnignoreOperationsInChecksResult """Unmark changes for an operation as safe.""" unmarkChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to unmark changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!"""Update schema check configuration for a graph.""" updateCheckConfiguration("""Clients to ignore during validation.""" excludedClients:[ClientFilterInput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilterInput!]"""Operations to ignore during validation.""" excludedOperations:[ExcludedOperationInput!]"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean """Variant overrides for validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" operationCountThreshold:Int """Number of requests within the window for a query to be considered, relative to
+total request count. Expected values are between 0 and 0.05 (minimum 5% of
+total request volume)""" operationCountThresholdPercentage:Float """Only check operations from the last <timeRangeSeconds> seconds. The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long):CheckConfiguration!updateDatadogMetricsConfig(apiKey:String apiRegion:DatadogApiRegion enabled:Boolean):DatadogMetricsConfig updateDescription(description:String!):Service """Update hiddenFromUninvitedNonAdminAccountMembers""" updateHiddenFromUninvitedNonAdminAccountMembers(hiddenFromUninvitedNonAdminAccountMembers:Boolean!):Service updateReadme(readme:String!):Service updateTitle(title:String!):Service """Publish a schema to this variant, either via a document or an introspection query result.""" uploadSchema(errorOnBadRequest:Boolean!=true gitContext:GitContextInput historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false schema:IntrospectionSchemaInput schemaDocument:String tag:String!):UploadSchemaMutationResponse upsertChannel(id:ID pagerDutyChannel:PagerDutyChannelInput slackChannel:SlackChannelInput webhookChannel:WebhookChannelInput):Channel """Creates a contract schema from a source variant and a set of filter configurations""" upsertContractVariant(contractVariantName:String!filterConfig:FilterConfigInput!initiateLaunch:Boolean!=true sourceVariant:String!):ContractVariantUpsertResult!"""Publish to a subgraph. If composition is successful, this will update running routers.""" upsertImplementingServiceAndTriggerComposition(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult """Create/update PagerDuty notification channel""" upsertPagerDutyChannel(channel:PagerDutyChannelInput!id:ID):PagerDutyChannel upsertQueryTrigger(id:ID trigger:QueryTriggerInput!):QueryTrigger """Create or update a subscription for a service.""" upsertRegistrySubscription("""ID of Slack channel for registry notification.""" channelID:ID """ID of registry subscription""" id:ID """Set of options/customization for notification.""" options:SubscriptionOptionsInput """Variant to notify on.""" variant:String):RegistrySubscription!upsertScheduledSummary(channelID:ID enabled:Boolean id:ID """Deprecated, use the 'variant' argument instead""" tag:String timezone:String variant:String):ScheduledSummary """Create/update Slack notification channel""" upsertSlackChannel(channel:SlackChannelInput!id:ID):SlackChannel upsertWebhookChannel(id:ID name:String secretToken:String url:String!):WebhookChannel validateOperations(gitContext:GitContextInput operations:[OperationDocumentInput!]!tag:String="current"):ValidateOperationsResult!"""This mutation will not result in any changes to the implementing service
+Run composition with the Implementing Service's partial schema replaced with the one provided
+in the mutation's input. Store the composed schema, return the hash of the composed schema,
+and any warnings and errors pertaining to composition.
+This mutation will not run validation against operations.""" validatePartialSchemaOfImplementingServiceAgainstGraph(graphVariant:String!implementingServiceName:String!partialSchema:PartialSchemaInput!):CompositionValidationResult!"""Make changes to a graph variant.""" variant(name:String!):GraphVariantMutation}"""Columns of ServiceOperationCheckStats.""" enum ServiceOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG TIMESTAMP UNCACHED_REQUESTS_COUNT}type ServiceOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilter{and:[ServiceOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceOperationCheckStatsFilterIn not:ServiceOperationCheckStatsFilter or:[ServiceOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. 
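A sketch of the check-configuration knobs described above (a count threshold, a threshold relative to total request volume, and a time range whose default is 7 days). The ServiceMutation entry point is assumed, and CheckConfiguration's fields sit outside this hunk, so only __typename is selected.

mutation TightenCheckConfig($graphId: ID!) {
  service(id: $graphId) {                       # assumed entry point
    updateCheckConfiguration(
      operationCountThreshold: 10               # ignore operations seen fewer than 10 times
      operationCountThresholdPercentage: 0.01   # relative to total request count, per the doc above
      timeRangeSeconds: 604800                  # 7 days, the documented default window
      includeBaseVariant: true
    ) {
      __typename
    }
  }
}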
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input ServiceOperationCheckStatsOrderBySpec{column:ServiceOperationCheckStatsColumn!direction:Ordering!}type ServiceOperationCheckStatsRecord{"""Dimensions of ServiceOperationCheckStats that can be grouped by.""" groupBy:ServiceOperationCheckStatsDimensions!"""Metrics of ServiceOperationCheckStats that can be aggregated over.""" metrics:ServiceOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceQueryStats.""" enum ServiceQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type ServiceQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String}"""Filter for data in ServiceQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilter{and:[ServiceQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:ServiceQueryStatsFilterIn not:ServiceQueryStatsFilter or:[ServiceQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input ServiceQueryStatsOrderBySpec{column:ServiceQueryStatsColumn!direction:Ordering!}type ServiceQueryStatsRecord{"""Dimensions of ServiceQueryStats that can be grouped by.""" groupBy:ServiceQueryStatsDimensions!"""Metrics of ServiceQueryStats that can be aggregated over.""" metrics:ServiceQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A map from role (permission) String to boolean that the current user is allowed for a particular graph.""" type ServiceRoles{"""Whether the currently authenticated user is permitted to perform schema checks (i.e. run `rover (sub)graph check`).""" canCheckSchemas:Boolean!"""Whether the currently authenticated user is permitted to create new graph variants.""" canCreateVariants:Boolean!"""Whether the currently authenticated user is permitted to delete the graph in question""" canDelete:Boolean!"""Whether the currently authenticated user is permitted to manage user access to the graph in question.""" canManageAccess:Boolean!"""Whether the currently authenticated user is permitted to manage the build configuration (e.g. build pipeline version).""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to manage 3rd party integrations (e.g. Datadog forwarding).""" canManageIntegrations:Boolean!"""Whether the currently authenticated user is permitted to manage graph-level API keys.""" canManageKeys:Boolean!"""Whether the currently authenticated user is permitted to perform basic administration (e.g. set to public) over variants.""" canManageVariants:Boolean!"""Whether the currently authenticated user is permitted to view details about the build configuration (e.g. build pipeline version).""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to view details of the check configuration for this graph.""" canQueryCheckConfiguration:Boolean!canQueryDeletedImplementingServices:Boolean!"""Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of.""" canQueryImplementingServices:Boolean!canQueryIntegrations:Boolean!canQueryPrivateInfo:Boolean!canQueryPublicInfo:Boolean!canQueryReadmeAuthor:Boolean!canQueryRoleOverrides:Boolean!"""Whether the currently authenticated user is permitted to download schemas owned by this graph.""" canQuerySchemas:Boolean!canQueryStats:Boolean!canQueryTokens:Boolean!canQueryTraces:Boolean!"""Whether the currently authenticated user is permitted to register operations (i.e. 
`apollo client:push`) for this graph.""" canRegisterOperations:Boolean!canStoreSchemasWithoutVariant:Boolean!canUndelete:Boolean!canUpdateAvatar:Boolean!canUpdateDescription:Boolean!canUpdateTitle:Boolean!canVisualizeStats:Boolean!@deprecated(reason:"Replaced with canQueryTraces and canQueryStats")"""Whether the currently authenticated user is permitted to make updates to the check configuration for this graph.""" canWriteCheckConfiguration:Boolean!canWriteTraces:Boolean!@deprecated(reason:"Never worked, not replaced")}"""A time window with a specified granularity over a given service.""" type ServiceStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:ServiceBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceBillingUsageStatsOrderBySpec!]):[ServiceBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:ServiceEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceEdgeServerInfosOrderBySpec!]):[ServiceEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ServiceErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceErrorStatsOrderBySpec!]):[ServiceErrorStatsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldStats("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:ServiceFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
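The ServiceRoles flags above are plain booleans. Assuming the graph type exposes them through a roles field (not visible in this hunk, and an assumption like the service(id:) entry point), a permissions probe could look like:

query MyPermissions($graphId: ID!) {
  service(id: $graphId) {          # assumed entry point
    roles {                        # assumed field returning ServiceRoles
      canCheckSchemas
      canManageKeys
      canQueryStats
      canRegisterOperations
    }
  }
}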
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldUsageOrderBySpec!]):[ServiceFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:ServiceOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceOperationCheckStatsOrderBySpec!]):[ServiceOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:ServiceQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceQueryStatsOrderBySpec!]):[ServiceQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:ServiceTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTracePathErrorsRefsOrderBySpec!]):[ServiceTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:ServiceTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTraceRefsOrderBySpec!]):[ServiceTraceRefsRecord!]!}"""Columns of ServiceTracePathErrorsRefs.""" enum ServiceTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type ServiceTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in ServiceTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilter{and:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. 
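A sketch of pulling ordered operation metrics out of a ServiceStatsWindow, showing limit, orderBy and the rounded window bounds. The service(id:) entry point, the statsWindow(from:, to:) connector and the DESCENDING member of the Ordering enum are defined outside this hunk and assumed here.

query TopOperations($graphId: ID!, $from: Timestamp!, $to: Timestamp!) {
  service(id: $graphId) {                          # assumed entry point
    statsWindow(from: $from, to: $to) {            # assumed connector to ServiceStatsWindow
      roundedDownFrom
      roundedUpTo
      queryStats(
        limit: 25
        filter: { schemaTag: "current" }
        orderBy: [{ column: TIMESTAMP, direction: DESCENDING }]   # Ordering value assumed
      ) {
        groupBy { queryName clientName clientVersion }
        metrics { totalRequestCount requestsWithErrorsCount cachedRequestsCount }
        timestamp
      }
    }
  }
}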
To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:ServiceTracePathErrorsRefsFilterIn not:ServiceTracePathErrorsRefsFilter or:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input ServiceTracePathErrorsRefsOrderBySpec{column:ServiceTracePathErrorsRefsColumn!direction:Ordering!}type ServiceTracePathErrorsRefsRecord{"""Dimensions of ServiceTracePathErrorsRefs that can be grouped by.""" groupBy:ServiceTracePathErrorsRefsDimensions!"""Metrics of ServiceTracePathErrorsRefs that can be aggregated over.""" metrics:ServiceTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceTraceRefs.""" enum ServiceTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type ServiceTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String traceId:ID}"""Filter for data in ServiceTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilter{and:[ServiceTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:ServiceTraceRefsFilterIn not:ServiceTraceRefsFilter or:[ServiceTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input ServiceTraceRefsOrderBySpec{column:ServiceTraceRefsColumn!direction:Ordering!}type ServiceTraceRefsRecord{"""Dimensions of ServiceTraceRefs that can be grouped by.""" groupBy:ServiceTraceRefsDimensions!"""Metrics of ServiceTraceRefs that can be aggregated over.""" metrics:ServiceTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Slack notification channel""" type SlackChannel implements Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!url:String!}"""Slack notification channel parameters""" input SlackChannelInput{name:String url:String!}input SlackNotificationField{key:String!value:String!}"""Slack notification message""" input SlackNotificationInput{color:String fallback:String!fields:[SlackNotificationField!]iconUrl:String text:String timestamp:Timestamp title:String titleLink:String username:String}type SourceLocation{column:Int!line:Int!}"""A time window with a specified granularity.""" type StatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:BillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order BillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[BillingUsageStatsOrderBySpec!]):[BillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:EdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order EdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[EdgeServerInfosOrderBySpec!]):[EdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ErrorStatsOrderBySpec!]):[ErrorStatsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:FieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldLatenciesOrderBySpec!]):[FieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:FieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldUsageOrderBySpec!]):[FieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:OperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order OperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[OperationCheckStatsOrderBySpec!]):[OperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:QueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order QueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[QueryStatsOrderBySpec!]):[QueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:TracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TracePathErrorsRefsOrderBySpec!]):[TracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:TraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TraceRefsOrderBySpec!]):[TraceRefsRecord!]!}type StoreSchemaError{code:StoreSchemaErrorCode!message:String!}enum StoreSchemaErrorCode{SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID}type StoreSchemaResponse{sha256:SHA256!}union StoreSchemaResponseOrError=StoreSchemaError|StoreSchemaResponse type StoredApprovedChange{argNode:NamedIntrospectionArgNoDescription childNode:NamedIntrospectionValueNoDescription code:ChangeCode!parentNode:NamedIntrospectionTypeNoDescription}scalar StringOrInt type StringToString{key:String!value:String!}input StringToStringInput{key:String!value:String!}type Subgraph{hash:String!name:String!routingURL:String!}type SubgraphChange{name:ID!type:SubgraphChangeType!}enum SubgraphChangeType{ADDITION DELETION MODIFICATION}type SubgraphConfig{id:ID!name:String!schemaHash:String!sdl:String!url:String!}type SubscriptionOptions{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}input SubscriptionOptionsInput{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}enum SubscriptionState{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}enum SubscriptionStateV2{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}type TemporaryURL{url:String!}enum ThemeName{DARK LIGHT}enum TicketPriority{P0 P1 P2 P3}enum TicketStatus{CLOSED HOLD NEW OPEN PENDING SOLVED}"""ISO 8601, extended format with nanoseconds, Zulu (or '[+-]seconds' for times relative to now)""" scalar Timestamp type TimezoneOffset{minutesOffsetFromUTC:Int!zoneID:String!}"""Counts of changes.""" type TotalChangeSummaryCounts{"""Number of changes that are additions. This includes adding types, adding fields to object, input
+object, and interface types, adding values to enums, adding members to interfaces and unions, and
+adding arguments.""" additions:Int!"""Number of changes that are new usages of the @deprecated directive.""" deprecations:Int!"""Number of changes that are edits. This includes types changing kind, fields and arguments
+changing type, arguments changing default value, and any description changes. This also includes
+edits to @deprecated reason strings.""" edits:Int!"""Number of changes that are removals. This includes removing types, removing fields from object,
+input object, and interface types, removing values from enums, removing members from interfaces
+and unions, and removing arguments. This also includes removing @deprecated usages.""" removals:Int!}type Trace{cacheMaxAgeMs:Float cacheScope:CacheScope clientName:String clientVersion:String durationMs:Float!endTime:Timestamp!http:TraceHTTP id:ID!operationName:String protobuf:Protobuf!root:TraceNode!signature:String!startTime:Timestamp!variablesJSON:[StringToString!]!}type TraceError{json:String!locations:[TraceSourceLocation!]!message:String!timestamp:Timestamp}type TraceHTTP{host:String method:HTTPMethod!path:String protocol:String requestHeaders:[StringToString!]!responseHeaders:[StringToString!]!secure:Boolean!statusCode:Int!}type TraceNode{cacheMaxAgeMs:Float cacheScope:CacheScope children:[TraceNode!]!childrenIds:[ID!]!descendants:[TraceNode!]!descendantsIds:[ID!]!endTime:Timestamp!errors:[TraceError!]!id:ID!key:StringOrInt originalFieldName:String parent:ID!parentId:ID path:[String!]!startTime:Timestamp!type:String}"""Columns of TracePathErrorsRefs.""" enum TracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type TracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String """If metrics were collected from a federated service, this field will be prefixed with `service:<SERVICE_NAME>.`""" path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in TracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilter{and:[TracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:TracePathErrorsRefsFilterIn not:TracePathErrorsRefsFilter or:[TracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input TracePathErrorsRefsOrderBySpec{column:TracePathErrorsRefsColumn!direction:Ordering!}type TracePathErrorsRefsRecord{"""Dimensions of TracePathErrorsRefs that can be grouped by.""" groupBy:TracePathErrorsRefsDimensions!"""Metrics of TracePathErrorsRefs that can be aggregated over.""" metrics:TracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of TraceRefs.""" enum TraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type TraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in TraceRefs. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input TraceRefsFilter{and:[TraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:TraceRefsFilterIn not:TraceRefsFilter or:[TraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input TraceRefsOrderBySpec{column:TraceRefsColumn!direction:Ordering!}type TraceRefsRecord{"""Dimensions of TraceRefs that can be grouped by.""" groupBy:TraceRefsDimensions!"""Metrics of TraceRefs that can be aggregated over.""" metrics:TraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type TraceSourceLocation{column:Int!line:Int!}"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc.""" type TypeChangeSummaryCounts{"""Number of changes that are additions of types.""" additions:Int!"""Number of changes that are edits. This includes types changing kind and any type description
+changes, but also includes adding/removing values from enums, adding/removing members from
+interfaces and unions, and any enum value deprecation and description changes.""" edits:Int!"""Number of changes that are removals of types.""" removals:Int!}"""the TypeFilterConfig is used to isolate
+types, and subsequent fields, through
+various configuration settings.
+
+It defaults to filter towards user defined
+types only""" input TypeFilterConfig{"""include abstract types (interfaces and unions)""" includeAbstractTypes:Boolean=true """include built in scalars (i.e. Boolean, Int, etc)""" includeBuiltInTypes:Boolean=false """include reserved introspection types (i.e. __Type)""" includeIntrospectionTypes:Boolean=false}type URI{"""A GCS URI""" gcs:String!}type UnignoreOperationsInChecksResult{graph:Service!}union UpdateOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union UpdateOperationCollectionResult=OperationCollection|PermissionError|ValidationError """The result of a schema publish to a graph variant.""" type UploadSchemaMutationResponse{"""A response code for processing via machines (e.g. UPLOAD_SUCCESS or NO_CHANGES)""" code:String!"""Human readable result of a schema publish.""" message:String!"""If successful, the corresponding publication.""" publication:SchemaTag """Whether the schema publish successfully completed or encountered errors.""" success:Boolean!"""If successful, the corresponding publication.""" tag:SchemaTag}"""A registered user.""" type User implements Identity{acceptedPrivacyPolicyAt:Timestamp accounts:[Account!]!@deprecated(reason:"Replaced with User.memberships.account")apiKeys(includeCookies:Boolean=false):[UserApiKey!]!"""Translation of this user identity to an 'Actor' type.""" asActor:Actor!"""Get an URL to which an avatar image can be uploaded. Client uploads by sending a PUT request
+with the image data to MediaUploadInfo.url. Client SHOULD set the "Content-Type" header to the
+browser-inferred MIME type, and SHOULD set the "x-apollo-content-filename" header to the
+filename, if such information is available. Client MUST set the "x-apollo-csrf-token" header to
+MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image URL for the user's avatar. Note that CORS is not enabled for these URLs. The size
+argument is used for bandwidth reduction, and should be the size of the image as displayed in the
+application. Apollo's media server will downscale larger images to at least the requested size,
+but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String betaFeaturesOn:Boolean!canUpdateAvatar:Boolean!canUpdateEmail:Boolean!canUpdateFullName:Boolean!createdAt:Timestamp!email:String emailModifiedAt:Timestamp emailVerified:Boolean!experimentalFeatures:UserExperimentalFeatures!featureIntros:FeatureIntros fullName:String!"""The user's GitHub username, if they log in via GitHub. May be null even for GitHub users in some edge cases.""" githubUsername:String """The unique identifier for a user.""" id:ID!"""This role is reserved exclusively for internal MDG employees, and it controls what access they may have to other
+organizations. Only admins are allowed to see this field.""" internalAdminRole:InternalMdgAdminRole """Last time any API token from this user was used against AGM services""" lastAuthenticatedAt:Timestamp logoutAfterIdleMs:Int """Which organizations a user belongs to.""" memberships:[UserMembership!]!"""The name (first and last) of a user.""" name:String!odysseyCertifications:[OdysseyCertification!]odysseyCourses:[OdysseyCourse!]odysseyHasEarlyAccess:Boolean!odysseyHasRequestedEarlyAccess:Boolean!odysseyTasks:[OdysseyTask!]sandboxOperationCollections:[OperationCollection!]!synchronized:Boolean!"""List of Zendesk tickets this user has submitted""" tickets:[ZendeskTicket!]type:UserType!}type UserApiKey implements ApiKey{id:ID!keyName:String token:String!}type UserExperimentalFeatures{exampleFeature:Boolean!}"""An organization a given user belongs to.""" type UserMembership{"""The organization a user is a member of.""" account:Account!"""When the user joined the organization.""" createdAt:Timestamp!"""What level of access a use has to an organization.""" permission:UserPermission!"""The user that is a member of an organization.""" user:User!}type UserMutation{acceptPrivacyPolicy:Void """Change the user's password""" changePassword(newPassword:String!previousPassword:String!):Void createOdysseyCertification(certificationId:String!):OdysseyCertification createOdysseyCourses(courses:[OdysseyCourseInput!]!):[OdysseyCourse!]createOdysseyTasks(tasks:[OdysseyTaskInput!]!):[OdysseyTask!]"""Delete the user's avatar. Requires User.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Hard deletes the associated user. Throws an error otherwise with reason included.""" hardDelete:Void """Create a new API key for this user. Must take in a name for this key.""" newKey(keyName:String!):UserApiKey!"""Create a new API key for this user if there are no current API keys.
+If an API key already exists, this will return one at random and not create a new one.""" provisionKey(keyName:String!="add-a-name"):ApiKeyProvision """Refresh information about the user from its upstream service (eg list of organizations from GitHub)""" refresh:User """Removes the given key from this user. Can be used to remove either a web cookie or a user API key.""" removeKey(id:ID!):Void """Renames the given key to the new key name.""" renameKey(id:ID!newKeyName:String):UserApiKey resendVerificationEmail:Void setOdysseyCourse(course:OdysseyCourseInput!):OdysseyCourse setOdysseyTask(task:OdysseyTaskInput!):OdysseyTask """Submit a zendesk ticket for this user""" submitZendeskTicket(collaborators:[String!]email:String!ticket:ZendeskTicketInput!):ZendeskTicket """Update information about a user; all arguments are optional""" update(email:String fullName:String referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """Updates this users' preference concerning opting into beta features.""" updateBetaFeaturesOn(betaFeaturesOn:Boolean!):User """Update the status of a feature for this. For example, if you want to hide an introductory popup.""" updateFeatureIntros(newFeatureIntros:FeatureIntrosInput):User """Update user to have the given internal mdg admin role.
+It is necessary to be an MDG_INTERNAL_SUPER_ADMIN to perform update.
+Additionally, upserting a null value explicitly revokes this user's
+admin status.""" updateRole(newRole:InternalMdgAdminRole):User user:User!verifyEmail(token:String!):User}enum UserPermission{BILLING_MANAGER CONSUMER CONTRIBUTOR GRAPH_ADMIN LEGACY_GRAPH_KEY OBSERVER ORG_ADMIN}enum UserSegment{JOIN_MY_TEAM LOCAL_DEVELOPMENT NOT_SPECIFIED PRODUCTION_GRAPHS SANDBOX SANDBOX_OPERATION_COLLECTIONS TRY_TEAM}type UserSettings{appNavCollapsed:Boolean!autoManageVariables:Boolean!id:String!mockingResponses:Boolean!preflightScriptEnabled:Boolean!responseHints:ResponseHints!tableMode:Boolean!themeName:ThemeName!}"""Explorer user settings input""" input UserSettingsInput{appNavCollapsed:Boolean autoManageVariables:Boolean mockingResponses:Boolean preflightScriptEnabled:Boolean responseHints:ResponseHints tableMode:Boolean themeName:ThemeName}enum UserType{APOLLO GITHUB SSO}type ValidateOperationsResult{validationResults:[ValidationResult!]!}type ValidationError implements Error{message:String!}enum ValidationErrorCode{DEPRECATED_FIELD INVALID_OPERATION NON_PARSEABLE_DOCUMENT}enum ValidationErrorType{FAILURE INVALID WARNING}"""Represents a single validation error, with information relating to the error
+and its respective operation""" type ValidationResult{"""The validation result's error code""" code:ValidationErrorCode!"""Description of the validation error""" description:String!"""The operation related to this validation result""" operation:OperationDocument!"""The type of validation error thrown - warning, failure, or invalid.""" type:ValidationErrorType!}"""Always null""" scalar Void """Webhook notification channel""" type WebhookChannel implements Channel{id:ID!name:String!secretToken:String subscriptions:[ChannelSubscription!]!url:String!}"""PagerDuty notification channel parameters""" input WebhookChannelInput{name:String secretToken:String url:String!}type ZendeskTicket{createdAt:Timestamp!description:String!graph:Service id:Int!organization:Account priority:TicketPriority!status:TicketStatus subject:String!user:User}"""Zendesk ticket input""" input ZendeskTicketInput{description:String!graphId:String organizationId:String priority:TicketPriority!subject:String!}
diff --git a/pkgs/development/tools/rover/update.sh b/pkgs/development/tools/rover/update.sh
new file mode 100755
index 00000000000..50a9945884b
--- /dev/null
+++ b/pkgs/development/tools/rover/update.sh
@@ -0,0 +1,96 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p curl gnugrep gnused jq nix-prefetch
+
+set -eu -o pipefail
+
+dirname=$(realpath "$(dirname "$0")")
+nixpkgs=$(realpath "${dirname}/../../../..")
+
+old_rover_version=$(nix eval --raw -f "$nixpkgs" rover.version)
+rover_url=https://api.github.com/repos/apollographql/rover/releases/latest
+rover_tag=$(curl "$rover_url" | jq --raw-output ".tag_name")
+rover_version="$(expr "$rover_tag" : 'v\(.*\)')"
+
+if [[ "$old_rover_version" == "$rover_version" ]]; then
+    echo "rover is up-to-date: ${old_rover_version}"
+    exit 0
+fi
+
+echo "Fetching rover"
+rover_tar_url="https://github.com/apollographql/rover/archive/refs/tags/${rover_tag}.tar.gz"
+{
+    read rover_hash
+    read repo
+} < <(nix-prefetch-url "$rover_tar_url" --unpack --type sha256 --print-path)
+
+# Convert hash to SRI representation
+rover_sri_hash=$(nix hash to-sri --type sha256 "$rover_hash")
+
+# Identify librusty version and hash
+librusty_version=$(
+    sed --quiet '/^name = "v8"$/{n;p}' "${repo}/Cargo.lock" \
+        | grep --only-matching --perl-regexp '^version = "\K[^"]+'
+)
+librusty_arch=x86_64-unknown-linux-gnu
+librusty_url="https://github.com/denoland/rusty_v8/releases/download/v${librusty_version}/librusty_v8_release_${librusty_arch}.a"
+echo "Fetching librusty"
+librusty_hash=$(nix-prefetch-url "$librusty_url" --type sha256)
+librusty_sri_hash=$(nix hash to-sri --type sha256 "$librusty_hash")
+
+# Update rover version.
+sed --in-place \
+    "s|version = \"[0-9.]*\"|version = \"$rover_version\"|" \
+    "$dirname/default.nix"
+
+# Update rover hash.
+sed --in-place \
+    "s|sha256 = \"[a-zA-Z0-9\/+-=]*\"|sha256 = \"$rover_sri_hash\"|" \
+    "$dirname/default.nix"
+
+# Clear cargoSha256.
+sed --in-place \
+    "s|cargoSha256 = \".*\"|cargoSha256 = \"sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\"|" \
+    "$dirname/default.nix"
+
+# Update cargoSha256
+echo "Computing cargoSha256"
+cargoSha256=$(
+    nix-prefetch "{ sha256 }: (import $nixpkgs {}).rover.cargoDeps.overrideAttrs (_: { outputHash = sha256; })"
+)
+sed --in-place \
+    "s|cargoSha256 = \".*\"|cargoSha256 = \"$cargoSha256\"|" \
+    "$dirname/default.nix"
+
+# Update librusty version
+sed --in-place \
+    "s|version = \"[0-9.]*\"|version = \"$librusty_version\"|" \
+    "$dirname/librusty_v8.nix"
+
+# Update librusty hash
+sed --in-place \
+    "s|x86_64-linux = \"[^\"]*\"|x86_64-linux = \"$librusty_sri_hash\"|" \
+    "$dirname/librusty_v8.nix"
+
+# Update apollo api schema info
+response="$(mktemp)"
+schemaUrl=https://graphql.api.apollographql.com/api/schema
+
+mkdir -p "$dirname"/schema
+
+# Fetch schema info
+echo "Fetching Apollo GraphQL schema"
+# include response headers, and append terminating newline to response body
+curl --include --write-out "\n" "$schemaUrl" > "$response"
+
+# Parse response headers and write the etag to schema/etag.id
+grep \
+    --max-count=1 \
+    --only-matching \
+    --perl-regexp \
+    '^etag: \K\S*' \
+    "$response" \
+    > "$dirname"/schema/etag.id
+
+# Discard headers and blank line (terminated by carriage return), and write the
+# response body to schema/schema.graphql
+sed '1,/^\r/d' "$response" > "$dirname"/schema/schema.graphql
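
The update script above rewrites the version, src hash, and cargoSha256 of the package expression that sits next to it (plus the pinned librusty_v8 hash in a second file). That default.nix is not part of this hunk, so the following is only a hedged sketch of the shape being edited; every value is a placeholder.

{ lib, rustPlatform, fetchFromGitHub }:

rustPlatform.buildRustPackage rec {
  pname = "rover";
  # Placeholder; the "Update rover version" sed rewrites this line.
  version = "0.0.0";

  src = fetchFromGitHub {
    owner = "apollographql";
    repo = "rover";
    rev = "v${version}";
    # Placeholder; rewritten with the SRI hash derived from nix-prefetch-url.
    sha256 = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  };

  # Placeholder; cleared to this dummy value and then refilled from nix-prefetch.
  cargoSha256 = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
}
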
diff --git a/pkgs/development/tools/sentry-cli/default.nix b/pkgs/development/tools/sentry-cli/default.nix
index 740e60c5e15..2fb0f1ebbeb 100644
--- a/pkgs/development/tools/sentry-cli/default.nix
+++ b/pkgs/development/tools/sentry-cli/default.nix
@@ -9,13 +9,13 @@
 }:
 rustPlatform.buildRustPackage rec {
   pname = "sentry-cli";
-  version = "1.73.1";
+  version = "1.74.2";
 
   src = fetchFromGitHub {
     owner = "getsentry";
     repo = "sentry-cli";
     rev = version;
-    sha256 = "sha256-Er23OxFblRby9Vu4GvuPGJ4W0H/poRF5DbN1VXxHPJU=";
+    sha256 = "sha256-1A/c5HiXtT6xUTxVInv9DbbCsqpu8iCJ7I0A9wFeaQ0=";
   };
   doCheck = false;
 
@@ -25,7 +25,7 @@ rustPlatform.buildRustPackage rec {
   buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ Security SystemConfiguration ];
   nativeBuildInputs = [ pkg-config ];
 
-  cargoSha256 = "sha256-gWE/rl7XsRjLq0PVVCpcIA9zwJknU8e3TXcj0gy40aI=";
+  cargoSha256 = "sha256-z++t+Zt40az11z/xhobvvbbSNUBpnMzqlRzoOuYgY2U=";
 
   meta = with lib; {
     homepage = "https://docs.sentry.io/cli/";
diff --git a/pkgs/development/tools/skaffold/default.nix b/pkgs/development/tools/skaffold/default.nix
index 2d2237e7b63..4f286cb22d7 100644
--- a/pkgs/development/tools/skaffold/default.nix
+++ b/pkgs/development/tools/skaffold/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "skaffold";
-  version = "1.36.0";
+  version = "1.37.0";
 
   src = fetchFromGitHub {
     owner = "GoogleContainerTools";
     repo = "skaffold";
     rev = "v${version}";
-    sha256 = "sha256-N5m13wxCtdZ1UBBVmq77XvePJTphMhLoHl4VcTcl3dE=";
+    sha256 = "sha256-mS+q+WOdEMMHjoqoIlDOqkoaRRLlou7FbMjC436k96A=";
   };
 
-  vendorSha256 = "sha256-49UuGVdOlIf50QZ+qlA4kxlrSKouN8zZlby4wMxabe0=";
+  vendorSha256 = "sha256-LiNnTAI7iJkWL657eBw5OsCdvgWE2ZFZ3e+8vJtMhoE=";
 
   subPackages = ["cmd/skaffold"];
 
diff --git a/pkgs/development/tools/toml2json/default.nix b/pkgs/development/tools/toml2json/default.nix
new file mode 100644
index 00000000000..0287b75b362
--- /dev/null
+++ b/pkgs/development/tools/toml2json/default.nix
@@ -0,0 +1,20 @@
+{ lib, rustPlatform, fetchCrate }:
+
+rustPlatform.buildRustPackage rec {
+  pname = "toml2json";
+  version = "1.3.0";
+
+  src = fetchCrate {
+    inherit pname version;
+    sha256 = "sha256-TxTxKHf5g+mBXDq147T5tuwCqyfyoz6Mj55g1tlgRDY=";
+  };
+
+  cargoHash = "sha256-EYp30TMIpzSCkPIqqdc7sGpfaWs9OLi9ey7DoPE4jzI=";
+
+  meta = with lib; {
+    description = "A very small CLI for converting TOML to JSON";
+    homepage = "https://github.com/woodruffw/toml2json";
+    license = with licenses; [ mit ];
+    maintainers = with maintainers; [ rvarago ];
+  };
+}
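
For new Rust packages like this, cargoHash is the SRI-format counterpart of the older cargoSha256, and a common way to obtain either value is to build once with a fake hash and copy the correct one from the resulting mismatch error. A minimal sketch of that workflow under the same pname/version, using the fake-hash helpers from lib:

{ lib, rustPlatform, fetchCrate }:

rustPlatform.buildRustPackage rec {
  pname = "toml2json";
  version = "1.3.0";

  src = fetchCrate {
    inherit pname version;
    # Deliberately wrong; the failed fetch reports the hash to paste in.
    sha256 = lib.fakeSha256;
  };

  # Same trick for the vendored cargo dependencies.
  cargoHash = lib.fakeHash;
}
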
diff --git a/pkgs/development/tools/wrangler/default.nix b/pkgs/development/tools/wrangler/default.nix
index fba7d96c1da..5752fe1477c 100644
--- a/pkgs/development/tools/wrangler/default.nix
+++ b/pkgs/development/tools/wrangler/default.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "wrangler";
-  version = "1.19.8";
+  version = "1.19.9";
 
   src = fetchFromGitHub {
     owner = "cloudflare";
     repo = pname;
     rev = "v${version}";
-    sha256 = "sha256-vJjAN7RmB1J4k7p2emfbjJxkpfph6piinmqVTR67HW0=";
+    sha256 = "sha256-cuntghTMGrAcrPunyi9ZWlxDcryYv7R6S3V8WJjEUtQ=";
   };
 
-  cargoSha256 = "sha256-dDQvcYnceBPDc+yeePjZ1k4a2ujCSh1hJMYFjPGw/bE=";
+  cargoSha256 = "sha256-gao8vCfzb81GUte6WAt2x/pxecg443bpQxvUSQCXL40=";
 
   nativeBuildInputs = [ pkg-config ];
 
diff --git a/pkgs/development/tools/yarn/default.nix b/pkgs/development/tools/yarn/default.nix
index 50d29cb4a1d..0e39a714ccd 100644
--- a/pkgs/development/tools/yarn/default.nix
+++ b/pkgs/development/tools/yarn/default.nix
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
   pname = "yarn";
-  version = "1.22.17";
+  version = "1.22.18";
 
   src = fetchzip {
     url = "https://github.com/yarnpkg/yarn/releases/download/v${version}/yarn-v${version}.tar.gz";
-    sha256 = "1skzlyv2976bl1063f94422jbjy4ns1nxl622biizq31z4821yvj";
+    sha256 = "sha256-gI4v/WPWrNa2i2oct8Ns7bpDzmDCy+c86pGKpNznhh0=";
   };
 
   buildInputs = [ nodejs ];
diff --git a/pkgs/development/tools/yq-go/default.nix b/pkgs/development/tools/yq-go/default.nix
index e8cd2a134fd..4a37a4ac91f 100644
--- a/pkgs/development/tools/yq-go/default.nix
+++ b/pkgs/development/tools/yq-go/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "yq-go";
-  version = "4.22.1";
+  version = "4.23.1";
 
   src = fetchFromGitHub {
     owner = "mikefarah";
     repo = "yq";
     rev = "v${version}";
-    sha256 = "sha256-+yVf9pSK2cH/d5fdaGSBrjce8vVqE9XMpZSI8s4xoJI=";
+    sha256 = "sha256-vYitX3Gvffo/MbSYAJv5HV74IcYZK7hIEd1xRQf/COU=";
   };
 
-  vendorSha256 = "sha256-F11FnDYJ59aKrdRXDPpKlhX52yQXdaN1sblSkVI2j9w=";
+  vendorSha256 = "sha256-R40zU0jOc/eIFVDsWG3+4o51iro7Sd7jwtyH/fpWVZs=";
 
   doCheck = false;
 
diff --git a/pkgs/games/rare/default.nix b/pkgs/games/rare/default.nix
new file mode 100644
index 00000000000..9bf3aca7542
--- /dev/null
+++ b/pkgs/games/rare/default.nix
@@ -0,0 +1,64 @@
+{ lib, fetchPypi, buildPythonApplication, makeDesktopItem, copyDesktopItems, qt5
+, pillow, psutil, pypresence, pyqt5, python, qtawesome, requests }:
+
+buildPythonApplication rec {
+  pname = "rare";
+  version = "1.8.8";
+
+  src = fetchPypi {
+    inherit version;
+    pname = "Rare";
+    sha256 = "sha256-00CtvBqSrT9yJUHZ5529VrIQtCOYkHRc8+rJHmrTSpg=";
+  };
+
+  nativeBuildInputs = [
+    copyDesktopItems
+    qt5.wrapQtAppsHook
+  ];
+
+  propagatedBuildInputs = [
+    pillow
+    psutil
+    pypresence
+    pyqt5
+    qtawesome
+    requests
+  ];
+
+  desktopItems = [
+    (makeDesktopItem {
+      name = pname;
+      exec = "rare";
+      icon = "Rare";
+      comment = meta.description;
+      desktopName = "Rare";
+      genericName = "Rare (Epic Games Launcher Open Source Alternative)";
+    })
+  ];
+
+  dontWrapQtApps = true;
+
+  preBuild = ''
+    # Solves "PermissionError: [Errno 13] Permission denied: '/homeless-shelter'"
+    export HOME=$(mktemp -d)
+  '';
+
+  postInstall = ''
+    install -Dm644 $out/${python.sitePackages}/rare/resources/images/Rare.png -t $out/share/pixmaps/
+  '';
+
+  preFixup = ''
+    makeWrapperArgs+=("''${qtWrapperArgs[@]}")
+  '';
+
+  # Project has no tests
+  doCheck = false;
+
+  meta = with lib; {
+    description = "GUI for Legendary, an Epic Games Launcher open source alternative";
+    homepage = "https://github.com/Dummerle/Rare";
+    maintainers = with maintainers; [ wolfangaukang ];
+    license = licenses.gpl3Only;
+    platforms = platforms.linux;
+  };
+}
diff --git a/pkgs/os-specific/linux/pflask/default.nix b/pkgs/os-specific/linux/pflask/default.nix
index ba525c1a387..deb3e06ca49 100644
--- a/pkgs/os-specific/linux/pflask/default.nix
+++ b/pkgs/os-specific/linux/pflask/default.nix
@@ -1,18 +1,22 @@
-{ lib, stdenv, fetchFromGitHub, python2, wafHook }:
+{ lib, stdenv, fetchFromGitHub, python3, wafHook }:
 
 stdenv.mkDerivation rec {
   pname = "pflask";
-  version = "unstable-2015-12-17";
+  version = "unstable-2018-01-23";
 
   src = fetchFromGitHub {
     owner = "ghedo";
-    repo = "pflask";
-    rev = "599418bb6453eaa0ccab493f9411f13726c1a636";
-    hash = "sha256-0RjitZd2JUK7WUEJuw4qhUx3joY5OI0Hh74mTzp7GmY=";
+    repo = pname;
+    rev = "9ac31ffe2ed29453218aac89ae992abbd6e7cc69";
+    hash = "sha256-bAKPUj/EipZ98kHbZiFZZI3hLVMoQpCrYKMmznpSDhg=";
   };
 
-  nativeBuildInputs = [ wafHook ];
-  buildInputs = [ python2 ];
+  nativeBuildInputs = [ python3 wafHook ];
+
+  postInstall = ''
+    mkdir -p $out/bin
+    cp build/pflask $out/bin
+  '';
 
   meta = {
     description = "Lightweight process containers for Linux";
diff --git a/pkgs/os-specific/linux/smem/default.nix b/pkgs/os-specific/linux/smem/default.nix
index b2636382aec..6308b83b600 100644
--- a/pkgs/os-specific/linux/smem/default.nix
+++ b/pkgs/os-specific/linux/smem/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchurl, python2 }:
+{ lib, stdenv, fetchurl, python3 }:
 
 stdenv.mkDerivation rec {
   pname = "smem";
@@ -9,7 +9,7 @@ stdenv.mkDerivation rec {
     sha256 = "19ibv1byxf2b68186ysrgrhy5shkc5mc69abark1h18yigp3j34m";
   };
 
-  buildInputs = [ python2 ];
+  buildInputs = [ python3 ];
 
   makeFlags = [ "smemcap" ];
 
diff --git a/pkgs/servers/atlassian/jira.nix b/pkgs/servers/atlassian/jira.nix
index 014fbedf643..7880c529c73 100644
--- a/pkgs/servers/atlassian/jira.nix
+++ b/pkgs/servers/atlassian/jira.nix
@@ -8,11 +8,11 @@
 
 stdenv.mkDerivation rec {
   pname = "atlassian-jira";
-  version = "8.21.0";
+  version = "8.22.0";
 
   src = fetchurl {
     url = "https://product-downloads.atlassian.com/software/jira/downloads/atlassian-jira-software-${version}.tar.gz";
-    sha256 = "sha256-AJWEm6JBHINca9l0R6dILLMkPGzFVuVjOqD/4AaO5nI=";
+    sha256 = "sha256-swEq8g/A1Ok1P7JtceUDId7kG0GRtBPGblY09xt81Ys=";
   };
 
   buildPhase = ''
diff --git a/pkgs/servers/dns/knot-resolver/default.nix b/pkgs/servers/dns/knot-resolver/default.nix
index 4d12a6d7172..d47b5a3e6a3 100644
--- a/pkgs/servers/dns/knot-resolver/default.nix
+++ b/pkgs/servers/dns/knot-resolver/default.nix
@@ -17,11 +17,11 @@ lua = luajitPackages;
 
 unwrapped = stdenv.mkDerivation rec {
   pname = "knot-resolver";
-  version = "5.4.4";
+  version = "5.5.0";
 
   src = fetchurl {
     url = "https://secure.nic.cz/files/knot-resolver/${pname}-${version}.tar.xz";
-    sha256 = "588964319e943679d391cc9c886d40ef858ecd9b33ae160023b4e2b5182b2cea";
+    sha256 = "4e6f48c74d955f143d603f6072670cb41ab9acdd95d4455d6e74b6908562c55a";
   };
 
   outputs = [ "out" "dev" ];
diff --git a/pkgs/servers/imgproxy/default.nix b/pkgs/servers/imgproxy/default.nix
index 2b7ed9e1c9c..b6dd99edfdd 100644
--- a/pkgs/servers/imgproxy/default.nix
+++ b/pkgs/servers/imgproxy/default.nix
@@ -3,16 +3,16 @@
 
 buildGoModule rec {
   pname = "imgproxy";
-  version = "3.3.1";
+  version = "3.3.2";
 
   src = fetchFromGitHub {
     owner = pname;
     repo = pname;
-    sha256 = "sha256-GZYaFK6g26gbVa3sHwTZ4fNGMFWBWevqcfJc/3SC890=";
+    sha256 = "sha256-LJsiZeKgetFTqX58I82jDr8fIgYJCDVhb44yg8uc/8w=";
     rev = "v${version}";
   };
 
-  vendorSha256 = "sha256-uV5pnnvVYviw2LnceQUiTJXva3WI51pgW6IeZzVhULc=";
+  vendorSha256 = "sha256-088VEntNx3ZX2p6EiAZ6nSeWmM32XLAOmqXu2sd9QG4=";
 
   doCheck = false;
 
diff --git a/pkgs/servers/jackett/default.nix b/pkgs/servers/jackett/default.nix
index 865c4490a04..69e891fe595 100644
--- a/pkgs/servers/jackett/default.nix
+++ b/pkgs/servers/jackett/default.nix
@@ -9,13 +9,13 @@
 
 buildDotnetModule rec {
   pname = "jackett";
-  version = "0.20.671";
+  version = "0.20.709";
 
   src = fetchFromGitHub {
     owner = pname;
     repo = pname;
     rev = "v${version}";
-    sha256 = "AoOaBryZHkqp5SabfKeW5E1YoJCwGhg2luK8SQvabow=";
+    sha256 = "Gx1VHjs37XBcvw20pQNrA/meLuVmogdGIzroRXvTv5Q=";
   };
 
   projectFile = "src/Jackett.Server/Jackett.Server.csproj";
diff --git a/pkgs/servers/jackett/deps.nix b/pkgs/servers/jackett/deps.nix
index 3b0fb8cb78b..022c604d0a9 100644
--- a/pkgs/servers/jackett/deps.nix
+++ b/pkgs/servers/jackett/deps.nix
@@ -152,6 +152,9 @@
   (fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "5.0.0"; sha256 = "0swqcknyh87ns82w539z1mvy804pfwhgzs97cr3nwqk6g5s42gd6"; })
   (fetchNuGet { pname = "Microsoft.Extensions.Primitives"; version = "6.0.0"; sha256 = "1kjiw6s4yfz9gm7mx3wkhp06ghnbs95icj9hi505shz9rjrg42q2"; })
   (fetchNuGet { pname = "Microsoft.Extensions.WebEncoders"; version = "2.2.0"; sha256 = "1mhnichccw6mjf37d38q2i1kr7qp485m7apa1b872ji0q16yy1y3"; })
+  (fetchNuGet { pname = "Microsoft.Net.Http.Headers"; version = "2.1.0"; sha256 = "1a02bll0flsin9grd3jxbi76ycl42qamynalbp7qbqcsprabw3ai"; })
+  (fetchNuGet { pname = "Microsoft.Net.Http.Headers"; version = "2.2.0"; sha256 = "0w6lrk9z67bcirq2cj2ldfhnizc6id77ba6i30hjzgqjlyhh1gx5"; })
+  (fetchNuGet { pname = "Microsoft.NET.Test.Sdk"; version = "17.0.0"; sha256 = "0bknyf5kig5icwjxls7pcn51x2b2qf91dz9qv67fl70v6cczaz2r"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.0.1"; sha256 = "01al6cfxp68dscl15z7rxfw9zvhm64dncsw09a1vmdkacsa2v6lr"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "1.1.0"; sha256 = "08vh1r12g6ykjygq5d3vq09zylgb84l63k49jc4v8faw9g93iqqm"; })
   (fetchNuGet { pname = "Microsoft.NETCore.Platforms"; version = "5.0.0"; sha256 = "0mwpwdflidzgzfx2dlpkvvnkgkr2ayaf0s80737h4wa35gaj11rc"; })
@@ -159,9 +162,6 @@
   (fetchNuGet { pname = "Microsoft.NETCore.Targets"; version = "1.1.0"; sha256 = "193xwf33fbm0ni3idxzbr5fdq3i2dlfgihsac9jj7whj0gd902nh"; })
   (fetchNuGet { pname = "Microsoft.NETFramework.ReferenceAssemblies"; version = "1.0.0"; sha256 = "0na724xhvqm63vq9y18fl9jw9q2v99bdwr353378s5fsi11qzxp9"; })
   (fetchNuGet { pname = "Microsoft.NETFramework.ReferenceAssemblies.net461"; version = "1.0.0"; sha256 = "00vkn4c6i0rn1l9pv912y0wgb9h6ks76qah8hvk441nari8fqbm1"; })
-  (fetchNuGet { pname = "Microsoft.Net.Http.Headers"; version = "2.1.0"; sha256 = "1a02bll0flsin9grd3jxbi76ycl42qamynalbp7qbqcsprabw3ai"; })
-  (fetchNuGet { pname = "Microsoft.Net.Http.Headers"; version = "2.2.0"; sha256 = "0w6lrk9z67bcirq2cj2ldfhnizc6id77ba6i30hjzgqjlyhh1gx5"; })
-  (fetchNuGet { pname = "Microsoft.NET.Test.Sdk"; version = "17.0.0"; sha256 = "0bknyf5kig5icwjxls7pcn51x2b2qf91dz9qv67fl70v6cczaz2r"; })
   (fetchNuGet { pname = "Microsoft.TestPlatform.ObjectModel"; version = "17.0.0"; sha256 = "1bh5scbvl6ndldqv20sl34h4y257irm9ziv2wyfc3hka6912fhn7"; })
   (fetchNuGet { pname = "Microsoft.TestPlatform.TestHost"; version = "17.0.0"; sha256 = "06mn31cgpp7d8lwdyjanh89prc66j37dchn74vrd9s588rq0y70r"; })
   (fetchNuGet { pname = "Microsoft.Win32.Primitives"; version = "4.3.0"; sha256 = "0j0c1wj4ndj21zsgivsc24whiya605603kxrbiw6wkfdync464wq"; })
@@ -185,8 +185,8 @@
   (fetchNuGet { pname = "NLog.Web.AspNetCore"; version = "4.14.0"; sha256 = "1q2v44inp4xjynncxpv432k2qjkfara1bpipmv3p3in0yv14l3wg"; })
   (fetchNuGet { pname = "NuGet.Frameworks"; version = "5.0.0"; sha256 = "18ijvmj13cwjdrrm52c8fpq021531zaz4mj4b4zapxaqzzxf2qjr"; })
   (fetchNuGet { pname = "NUnit"; version = "3.13.2"; sha256 = "00bkjgarkwbj497da9d7lajala1ns67h1kx53w4bapwkf32jlcvn"; })
-  (fetchNuGet { pname = "NUnit3TestAdapter"; version = "4.1.0"; sha256 = "1z5g15npmsjszhfmkrdmp4ds7jpxzhxblss2rjl5mfn5sihy4cww"; })
   (fetchNuGet { pname = "NUnit.ConsoleRunner"; version = "3.12.0"; sha256 = "00ihk6i3wzqndrn6yyh1csh4b3h226x5kwdp3716p75p4nljs1ik"; })
+  (fetchNuGet { pname = "NUnit3TestAdapter"; version = "4.1.0"; sha256 = "1z5g15npmsjszhfmkrdmp4ds7jpxzhxblss2rjl5mfn5sihy4cww"; })
   (fetchNuGet { pname = "Polly"; version = "7.2.2"; sha256 = "0s15n5zwj44i6sw3v40ca8l6j0ijydxcakvad49j52rp49iwrmkn"; })
   (fetchNuGet { pname = "runtime.debian.8-x64.runtime.native.System.Security.Cryptography.OpenSsl"; version = "4.3.0"; sha256 = "16rnxzpk5dpbbl1x354yrlsbvwylrq456xzpsha1n9y3glnhyx9d"; })
   (fetchNuGet { pname = "runtime.fedora.23-x64.runtime.native.System.Security.Cryptography.OpenSsl"; version = "4.3.0"; sha256 = "0hkg03sgm2wyq8nqk6dbm9jh5vcq57ry42lkqdmfklrw89lsmr59"; })
diff --git a/pkgs/servers/mastodon/default.nix b/pkgs/servers/mastodon/default.nix
index a173c594c6b..9173f72eecc 100644
--- a/pkgs/servers/mastodon/default.nix
+++ b/pkgs/servers/mastodon/default.nix
@@ -1,4 +1,4 @@
-{ lib, stdenv, nodejs-slim, mkYarnPackage, fetchFromGitHub, fetchpatch, bundlerEnv
+{ lib, stdenv, nodejs-slim, mkYarnPackage, fetchFromGitHub, fetchpatch, bundlerEnv, nixosTests
 , yarn, callPackage, imagemagick, ffmpeg, file, ruby_3_0, writeShellScript
 
   # Allow building a fork or custom version of Mastodon:
@@ -119,6 +119,8 @@ stdenv.mkDerivation rec {
     ln -s ${run-streaming} $out/run-streaming.sh
   '';
 
+  passthru.tests.mastodon = nixosTests.mastodon;
+
   meta = with lib; {
     description = "Self-hosted, globally interconnected microblogging software based on ActivityPub";
     homepage = "https://joinmastodon.org";
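
The passthru.tests line added above follows the usual convention of exposing a package's NixOS VM test on the package itself, so that building mastodon.tests.mastodon exercises the VM test whenever the package changes. A minimal sketch of the convention, with hypothetical names:

{ stdenv, nixosTests }:

stdenv.mkDerivation {
  pname = "example-with-tests";   # hypothetical package
  version = "1.0";
  dontUnpack = true;              # no sources needed for this sketch
  installPhase = "mkdir -p $out";

  # Expose the related VM test on the derivation; building
  # example-with-tests.tests.vm then runs that test.
  passthru.tests.vm = nixosTests.mastodon;
}
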
diff --git a/pkgs/servers/misc/virtiofsd/default.nix b/pkgs/servers/misc/virtiofsd/default.nix
index 6d5ebbb76c0..d8ae7ca61d9 100644
--- a/pkgs/servers/misc/virtiofsd/default.nix
+++ b/pkgs/servers/misc/virtiofsd/default.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "virtiofsd";
-  version = "1.0.0";
+  version = "1.1.0";
 
   src = fetchFromGitLab {
     owner = "virtio-fs";
     repo = "virtiofsd";
     rev = "v${version}";
-    sha256 = "010xf482qip91mv91wy9zjdsq0gfg1fd6iclrcry0nfnwlbigbwd";
+    sha256 = "sha256-WB0zu2M/5enBOoOUUSXnNAkbsA+JzDgtoLncE1YcDLs=";
   };
 
-  cargoSha256 = "0bfvqbmvkf17slra5k0nnva6j6w07769k226qnbzb3947zf4x2ga";
+  cargoSha256 = "sha256-uRPmZE/xc0yeurBZ4rnrZua5d4lbPwStMUacFgbquuk=";
 
   buildInputs = [ libcap_ng libseccomp ];
 
diff --git a/pkgs/servers/monitoring/prometheus/apcupsd-exporter.nix b/pkgs/servers/monitoring/prometheus/apcupsd-exporter.nix
index 64105c9c58f..c2f90322850 100644
--- a/pkgs/servers/monitoring/prometheus/apcupsd-exporter.nix
+++ b/pkgs/servers/monitoring/prometheus/apcupsd-exporter.nix
@@ -2,18 +2,16 @@
 
 buildGoModule rec {
   pname = "apcupsd-exporter";
-  version = "0.2.0";
+  version = "0.3.0";
 
   src = fetchFromGitHub {
     owner = "mdlayher";
     repo = "apcupsd_exporter";
     rev = "v${version}";
-    sha256 = "0gjj23qdjs7rqimq95rbfw43m4l6g73j840svxjlmpd1vzzz2v2q";
+    sha256 = "sha256-c0LsUqpJbmWQmbmSGdEy7Bbk20my6iWNLeqtU5BjYlw=";
   };
 
-  vendorSha256 = "09x8y8pmgfn897hvnk122ry460y12b8a7y5fafri5wn9vxab9r82";
-
-  doCheck = false;
+  vendorSha256 = "sha256-bvLwHLviIAGmxYY1O0wFDWAMginEUklicrbjIbbPuUw=";
 
   passthru.tests = { inherit (nixosTests.prometheus-exporters) apcupsd; };
 
diff --git a/pkgs/servers/monitoring/prometheus/wireguard-exporter.nix b/pkgs/servers/monitoring/prometheus/wireguard-exporter.nix
index 1dbb3179627..61cf36f882d 100644
--- a/pkgs/servers/monitoring/prometheus/wireguard-exporter.nix
+++ b/pkgs/servers/monitoring/prometheus/wireguard-exporter.nix
@@ -2,16 +2,16 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "wireguard-exporter";
-  version = "3.5.0";
+  version = "3.6.2";
 
   src = fetchFromGitHub {
     owner = "MindFlavor";
     repo = "prometheus_wireguard_exporter";
     rev = version;
-    sha256 = "sha256-LHhqQ0p2qt6ZAdkpY1SEAcGXH47TPhHvlDv+eL8GC58=";
+    sha256 = "sha256-eVGyBynKZLGlsaLwUOx7cJWdRHl65S0Wk1K5c9T8ysQ=";
   };
 
-  cargoSha256 = "sha256-lNFsO7FSmH1+DLM7ID0vn6234qTdtUoaLSnqKcbHoXE=";
+  cargoSha256 = "sha256-JbFoaMTs6TPYq2qgBkT7WX1itMXohgcWbC1UvaXOi8o=";
 
   buildInputs = lib.optionals stdenv.isDarwin [ libiconv Security ];
 
diff --git a/pkgs/servers/pleroma/default.nix b/pkgs/servers/pleroma/default.nix
index ec7de2ac57c..dd4025a76c0 100644
--- a/pkgs/servers/pleroma/default.nix
+++ b/pkgs/servers/pleroma/default.nix
@@ -1,7 +1,8 @@
 { lib, beamPackages
 , fetchFromGitHub, fetchFromGitLab
-, file, cmake
+, file, cmake, bash
 , nixosTests, writeText
+, cookieFile ? null
 , ...
 }:
 
@@ -17,6 +18,34 @@ beamPackages.mixRelease rec {
     sha256 = "sha256-RcqqNNNCR4cxETUCyjChkpq+cQ1QzNOHHzdqBLtOc6g=";
   };
 
+  preFixup = if (cookieFile != null) then ''
+    # There's no way to use a subprocess to cat the content of the
+    # cookie file using wrapProgram: it gets escaped (by design) with
+    # a pair of backticks :(
+    # We have to come up with our own custom wrapper to do this.
+    function wrapWithCookie () {
+        local hidden
+        hidden="$(dirname "$1")/.$(basename "$1")"-wrapped
+        while [ -e "$hidden" ]; do
+            hidden="''${hidden}_"
+        done
+        mv "$1" "''${hidden}"
+
+        cat > "$1" << EOF
+    #!${bash}/bin/bash
+    export RELEASE_COOKIE="\$(cat "${cookieFile}")"
+    exec -a "\$0" "''${hidden}" "\$@"
+    EOF
+        chmod +x "$1"
+    }
+
+    for f in "$out"/bin/*; do
+        if [[ -x "$f" ]]; then
+            wrapWithCookie "$f"
+        fi
+    done
+  '' else "";
+
   mixNixDeps = import ./mix.nix {
     inherit beamPackages lib;
     overrides = (final: prev: {
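
Because cookieFile is now an ordinary argument of this expression (and the wrapper above is skipped when it stays null), a consumer can inject a cookie path with an override. A usage sketch; the path is purely illustrative:

let
  pkgs = import <nixpkgs> { };
in
pkgs.pleroma.override {
  # A string (not a Nix path) so the secret is read at run time rather than
  # copied into the store; every executable gets wrapped to export
  # RELEASE_COOKIE from this file.
  cookieFile = "/var/lib/pleroma/.cookie";
}
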
diff --git a/pkgs/servers/sabnzbd/default.nix b/pkgs/servers/sabnzbd/default.nix
index 764c61356f3..5f12ae0ee95 100644
--- a/pkgs/servers/sabnzbd/default.nix
+++ b/pkgs/servers/sabnzbd/default.nix
@@ -24,14 +24,14 @@ let
   ]);
   path = lib.makeBinPath [ par2cmdline unrar unzip p7zip ];
 in stdenv.mkDerivation rec {
-  version = "3.5.2";
+  version = "3.5.3";
   pname = "sabnzbd";
 
   src = fetchFromGitHub {
     owner = pname;
     repo = pname;
     rev = version;
-    sha256 = "sha256-dGmZxnrxuUj6HwFI5QkSy9FnGYQpsNPFbUKXoJWpDfM=";
+    sha256 = "sha256-pdYTTahdn9YVFreU5KhMGlUzQxHviN5G4TxWKKRBxOc=";
   };
 
   nativeBuildInputs = [ makeWrapper ];
diff --git a/pkgs/servers/sql/materialize/default.nix b/pkgs/servers/sql/materialize/default.nix
index ce91c322af1..9dcfb8fe3e5 100644
--- a/pkgs/servers/sql/materialize/default.nix
+++ b/pkgs/servers/sql/materialize/default.nix
@@ -40,17 +40,17 @@ let
 in
 rustPlatform.buildRustPackage rec {
   pname = "materialize";
-  version = "0.15.0";
-  MZ_DEV_BUILD_SHA = "f79f63205649d6011822893c5b55396b2bef7b0b";
+  version = "0.17.0";
+  MZ_DEV_BUILD_SHA = "9f8cf75b461d288335cb6a7a73aaa670bab4a466";
 
   src = fetchFromGitHub {
     owner = "MaterializeInc";
     repo = pname;
     rev = "v${version}";
-    hash = "sha256-/A6+0fehBa8XEB8P8QUV5Lsl9Lwfz4FhQLgotvBG1Gw=";
+    hash = "sha256-wKYU5S77VoOX7UA9/d21Puz9NYs/om08eNM69/m3Orc=";
   };
 
-  cargoHash = "sha256-NJvAIy9b39HWJaG860Mlf3WasanUnz+Nq39k4WpddB0=";
+  cargoHash = "sha256-GTkn/fUprkpsDeQxtzdmS7Fub9QODO5/4nh9ERswOY0=";
 
   nativeBuildInputs = [ cmake perl pkg-config ]
     # Provides the mig command used by the krb5-src build script
@@ -71,9 +71,6 @@ rustPlatform.buildRustPackage rec {
     "--skip test_client_subject_and_references"
     "--skip test_no_block"
     "--skip test_safe_mode"
-    # this test is broken on 0.15.0
-    # TODO: re-add it in a subsequent release
-    "--skip test_threads"
     "--skip test_tls"
   ];
 
diff --git a/pkgs/servers/tailscale/default.nix b/pkgs/servers/tailscale/default.nix
index 8decb2f4d29..7c7bb84e15b 100644
--- a/pkgs/servers/tailscale/default.nix
+++ b/pkgs/servers/tailscale/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "tailscale";
-  version = "1.22.1";
+  version = "1.22.2";
 
   src = fetchFromGitHub {
     owner = "tailscale";
     repo = "tailscale";
     rev = "v${version}";
-    sha256 = "sha256-VUML5GwHrRYPd9lnOZuMA3T1SfdC0rVLP5m1yf+SA0A=";
+    sha256 = "sha256-W4BcUDMxUZKFXueSI/Xlml17Jabi/hnnOyXgitao76A=";
   };
 
   nativeBuildInputs = lib.optionals stdenv.isLinux [ makeWrapper ];
diff --git a/pkgs/servers/web-apps/wordpress/default.nix b/pkgs/servers/web-apps/wordpress/default.nix
index 343a1c345c6..1e6b40865ee 100644
--- a/pkgs/servers/web-apps/wordpress/default.nix
+++ b/pkgs/servers/web-apps/wordpress/default.nix
@@ -2,11 +2,11 @@
 
 stdenv.mkDerivation rec {
   pname = "wordpress";
-  version = "5.9";
+  version = "5.9.2";
 
   src = fetchurl {
     url = "https://wordpress.org/${pname}-${version}.tar.gz";
-    sha256 = "sha256-RVg45GvS0wqEka6b0lv3Acgu1p28fImAbioTCGjG/7c=";
+    sha256 = "sha256-d2Xy3SpWzpIHmXh8x5BKWF9jNlvKK6D3uwbqIGDGn4s=";
   };
 
   installPhase = ''
diff --git a/pkgs/test/haskell/shellFor/default.nix b/pkgs/test/haskell/shellFor/default.nix
index 04f5e045361..aa06ff6e52f 100644
--- a/pkgs/test/haskell/shellFor/default.nix
+++ b/pkgs/test/haskell/shellFor/default.nix
@@ -2,6 +2,7 @@
 
 (haskellPackages.shellFor {
   packages = p: [ p.constraints p.linear ];
+  extraDependencies = p: { libraryHaskellDepends = [ p.releaser ]; };
   nativeBuildInputs = [ cabal-install ];
   phases = [ "unpackPhase" "buildPhase" "installPhase" ];
   unpackPhase = ''
@@ -16,6 +17,16 @@
     export HOME=$(mktemp -d)
     mkdir -p $HOME/.cabal
     touch $HOME/.cabal/config
+
+    # Check extraDependencies.libraryHaskellDepends arg
+    ghci <<EOF
+    :m + Releaser.Primitives
+    :m + System.IO
+    writeFile "done" "done"
+    EOF
+    [[ done == $(cat done) ]]
+
+    # Check packages arg
     cabal v2-build --offline --verbose constraints linear --ghc-options="-O0 -j$NIX_BUILD_CORES"
   '';
   installPhase = ''
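
The ghci check added above exercises the extraDependencies argument of haskellPackages.shellFor, which makes additional Haskell libraries visible in the shell's package database without listing them in any cabal file. A hedged sketch of how a development shell might use it:

{ pkgs ? import <nixpkgs> { } }:

pkgs.haskellPackages.shellFor {
  # Packages whose dependency closures populate the shell; the test above
  # uses constraints and linear the same way.
  packages = p: [ p.constraints ];

  # Extra libraries available to ghci and cabal on top of the declared
  # dependencies; this is where the new check gets Releaser.Primitives from.
  extraDependencies = p: { libraryHaskellDepends = [ p.releaser ]; };

  nativeBuildInputs = [ pkgs.cabal-install ];
}
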
diff --git a/pkgs/tools/X11/jumpapp/default.nix b/pkgs/tools/X11/jumpapp/default.nix
index c152a72a23d..cce6ced7573 100644
--- a/pkgs/tools/X11/jumpapp/default.nix
+++ b/pkgs/tools/X11/jumpapp/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "jumpapp";
-  version = "1.1";
+  version = "1.2";
 
   src = fetchFromGitHub {
     owner = "mkropat";
     repo = "jumpapp";
     rev = "v${version}";
-    sha256 = "1jrk4mm42sz6ca2gkb6w3dad53d4im4shpgsq8s4vr6xpl3b43ry";
+    sha256 = "sha256-9sh0+zpDxwqRGC1jUgGTDdSDRdAFsL12mQ/Opwh/UBc=";
   };
 
   makeFlags = [ "PREFIX=$(out)" ];
diff --git a/pkgs/tools/admin/exoscale-cli/default.nix b/pkgs/tools/admin/exoscale-cli/default.nix
index 6cb52ccd135..f1a420b0c10 100644
--- a/pkgs/tools/admin/exoscale-cli/default.nix
+++ b/pkgs/tools/admin/exoscale-cli/default.nix
@@ -2,13 +2,13 @@
 
 buildGoPackage rec {
   pname = "exoscale-cli";
-  version = "1.51.2";
+  version = "1.52.0";
 
   src = fetchFromGitHub {
     owner  = "exoscale";
     repo   = "cli";
     rev    = "v${version}";
-    sha256 = "sha256-Nx9lASZOEetkADVEs2JxPRi9SCrb4SLnEpLpzcpp/Is=";
+    sha256 = "sha256-uaJt1QS4nDVliIFo11OhQYvOxJMeda0QGuaGZCPRoWk=";
   };
 
   goPackagePath = "github.com/exoscale/cli";
diff --git a/pkgs/tools/admin/trinsic-cli/default.nix b/pkgs/tools/admin/trinsic-cli/default.nix
index d8e084218d4..d280fac0733 100644
--- a/pkgs/tools/admin/trinsic-cli/default.nix
+++ b/pkgs/tools/admin/trinsic-cli/default.nix
@@ -2,11 +2,11 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "trinsic-cli";
-  version = "1.3.0";
+  version = "1.4.0";
 
   src = fetchurl {
     url = "https://github.com/trinsic-id/sdk/releases/download/v${version}/trinsic-cli-vendor-${version}.tar.gz";
-    sha256 = "4ec8a02cf7cd31822668e97befe96f0a7a32b1103abfe27c1bff643d3bf16588";
+    sha256 = "sha256-Dxmjbd1Q2JNeET22Fte7bygd+oH3ZfovRTJh5xforuw=";
   };
 
   cargoVendorDir = "vendor";
diff --git a/pkgs/tools/filesystems/squashfs/darwin.patch b/pkgs/tools/filesystems/squashfs/darwin.patch
index 5f2e0b07299..657971c5b0a 100644
--- a/pkgs/tools/filesystems/squashfs/darwin.patch
+++ b/pkgs/tools/filesystems/squashfs/darwin.patch
@@ -5,7 +5,7 @@ with BSD-specific changes omitted.
 See also https://github.com/plougher/squashfs-tools/pull/69.
 
 diff --git a/squashfs-tools/action.c b/squashfs-tools/action.c
-index 4b06ccb..3cad2ab 100644
+index ea2f604..9c979f8 100644
 --- a/squashfs-tools/action.c
 +++ b/squashfs-tools/action.c
 @@ -39,6 +39,10 @@
@@ -19,7 +19,7 @@ index 4b06ccb..3cad2ab 100644
  #include "squashfs_fs.h"
  #include "mksquashfs.h"
  #include "action.h"
-@@ -2414,9 +2418,12 @@ static char *get_start(char *s, int n)
+@@ -2415,9 +2419,12 @@ static char *get_start(char *s, int n)
  
  static int subpathname_fn(struct atom *atom, struct action_data *action_data)
  {
@@ -34,7 +34,7 @@ index 4b06ccb..3cad2ab 100644
  
  /*
 diff --git a/squashfs-tools/info.c b/squashfs-tools/info.c
-index fe23d78..5c2f835 100644
+index 216b979..eea2ec9 100644
 --- a/squashfs-tools/info.c
 +++ b/squashfs-tools/info.c
 @@ -144,31 +144,22 @@ void dump_state()
@@ -89,7 +89,7 @@ index fe23d78..5c2f835 100644
  }
  
 diff --git a/squashfs-tools/mksquashfs.c b/squashfs-tools/mksquashfs.c
-index a45b77f..3607448 100644
+index 843f9f4..ed2c3a6 100644
 --- a/squashfs-tools/mksquashfs.c
 +++ b/squashfs-tools/mksquashfs.c
 @@ -35,7 +35,12 @@
@@ -117,7 +117,7 @@ index a45b77f..3607448 100644
  
  #ifndef linux
  #include <sys/sysctl.h>
-@@ -5022,6 +5030,7 @@ static void initialise_threads(int readq, int fragq, int bwriteq, int fwriteq,
+@@ -5064,6 +5072,7 @@ static void initialise_threads(int readq, int fragq, int bwriteq, int fwriteq,
  	sigemptyset(&sigmask);
  	sigaddset(&sigmask, SIGQUIT);
  	sigaddset(&sigmask, SIGHUP);
@@ -125,7 +125,7 @@ index a45b77f..3607448 100644
  	if(pthread_sigmask(SIG_BLOCK, &sigmask, NULL) != 0)
  		BAD_ERROR("Failed to set signal mask in intialise_threads\n");
  
-@@ -5760,6 +5769,35 @@ static int get_physical_memory()
+@@ -5802,6 +5811,35 @@ static int get_physical_memory()
  	long long page_size = sysconf(_SC_PAGESIZE);
  	int phys_mem;
  
@@ -161,7 +161,7 @@ index a45b77f..3607448 100644
  	if(num_pages == -1 || page_size == -1) {
  		struct sysinfo sys;
  		int res = sysinfo(&sys);
-@@ -5772,6 +5810,7 @@ static int get_physical_memory()
+@@ -5814,6 +5852,7 @@ static int get_physical_memory()
  	}
  
  	phys_mem = num_pages * page_size >> 20;
@@ -170,7 +170,7 @@ index a45b77f..3607448 100644
  	if(phys_mem < SQUASHFS_LOWMEM)
  		BAD_ERROR("Mksquashfs requires more physical memory than is "
 diff --git a/squashfs-tools/read_xattrs.c b/squashfs-tools/read_xattrs.c
-index 4debedf..3257c30 100644
+index 2067f80..ca8b7f4 100644
 --- a/squashfs-tools/read_xattrs.c
 +++ b/squashfs-tools/read_xattrs.c
 @@ -31,13 +31,13 @@
@@ -186,11 +186,11 @@ index 4debedf..3257c30 100644
  
 -#include <stdlib.h>
 -
- extern int read_fs_bytes(int, long long, int, void *);
+ extern int read_fs_bytes(int, long long, long long, void *);
  extern int read_block(int, long long, long long *, int, void *);
  
 diff --git a/squashfs-tools/unsquashfs.c b/squashfs-tools/unsquashfs.c
-index 727f1d5..c1a6183 100644
+index d434b42..1208e45 100644
 --- a/squashfs-tools/unsquashfs.c
 +++ b/squashfs-tools/unsquashfs.c
 @@ -32,8 +32,12 @@
@@ -206,7 +206,7 @@ index 727f1d5..c1a6183 100644
  #include <sys/types.h>
  #include <sys/time.h>
  #include <sys/resource.h>
-@@ -1175,7 +1179,7 @@ int create_inode(char *pathname, struct inode *i)
+@@ -1182,7 +1186,7 @@ int create_inode(char *pathname, struct inode *i)
  			break;
  		case SQUASHFS_SYMLINK_TYPE:
  		case SQUASHFS_LSYMLINK_TYPE: {
@@ -215,7 +215,7 @@ index 727f1d5..c1a6183 100644
  				{ i->time, 0 },
  				{ i->time, 0 }
  			};
-@@ -1194,8 +1198,7 @@ int create_inode(char *pathname, struct inode *i)
+@@ -1201,8 +1205,7 @@ int create_inode(char *pathname, struct inode *i)
  				goto failed;
  			}
  
@@ -225,7 +225,7 @@ index 727f1d5..c1a6183 100644
  			if(res == -1) {
  				EXIT_UNSQUASH_STRICT("create_inode: failed to"
  					" set time on %s, because %s\n",
-@@ -2683,6 +2686,7 @@ void initialise_threads(int fragment_buffer_size, int data_buffer_size, int cat_
+@@ -2687,6 +2690,7 @@ void initialise_threads(int fragment_buffer_size, int data_buffer_size, int cat_
  		sigemptyset(&sigmask);
  		sigaddset(&sigmask, SIGQUIT);
  		sigaddset(&sigmask, SIGHUP);
@@ -234,7 +234,7 @@ index 727f1d5..c1a6183 100644
  			EXIT_UNSQUASH("Failed to set signal mask in initialise_threads\n");
  
 diff --git a/squashfs-tools/unsquashfs.h b/squashfs-tools/unsquashfs.h
-index 934618b..0e680ab 100644
+index 1099678..5b6a038 100644
 --- a/squashfs-tools/unsquashfs.h
 +++ b/squashfs-tools/unsquashfs.h
 @@ -46,6 +46,10 @@
@@ -249,7 +249,7 @@ index 934618b..0e680ab 100644
  #include "squashfs_fs.h"
  #include "unsquashfs_error.h"
 diff --git a/squashfs-tools/unsquashfs_info.c b/squashfs-tools/unsquashfs_info.c
-index c8e2b9b..7d4f7af 100644
+index e906eaf..f1e68c2 100644
 --- a/squashfs-tools/unsquashfs_info.c
 +++ b/squashfs-tools/unsquashfs_info.c
 @@ -96,31 +96,22 @@ void dump_state()
@@ -304,7 +304,7 @@ index c8e2b9b..7d4f7af 100644
  }
  
 diff --git a/squashfs-tools/unsquashfs_xattr.c b/squashfs-tools/unsquashfs_xattr.c
-index 7742dfe..f8cd3b6 100644
+index 61910e1..73e0090 100644
 --- a/squashfs-tools/unsquashfs_xattr.c
 +++ b/squashfs-tools/unsquashfs_xattr.c
 @@ -27,6 +27,11 @@
@@ -320,7 +320,7 @@ index 7742dfe..f8cd3b6 100644
  
  extern int root_process;
 diff --git a/squashfs-tools/xattr.c b/squashfs-tools/xattr.c
-index 64dfd82..d82d186 100644
+index b1c0089..6d7ed98 100644
 --- a/squashfs-tools/xattr.c
 +++ b/squashfs-tools/xattr.c
 @@ -22,6 +22,14 @@
@@ -353,5 +353,5 @@ index 64dfd82..d82d186 100644
  #include "squashfs_swap.h"
  #include "mksquashfs.h"
 -- 
-2.23.0
+2.35.1
 
diff --git a/pkgs/tools/filesystems/squashfs/default.nix b/pkgs/tools/filesystems/squashfs/default.nix
index ee6a9d9a4c3..340c5add295 100644
--- a/pkgs/tools/filesystems/squashfs/default.nix
+++ b/pkgs/tools/filesystems/squashfs/default.nix
@@ -2,44 +2,43 @@
 , stdenv
 , fetchFromGitHub
 , fetchpatch
-, zlib
-, xz
+, help2man
 , lz4
 , lzo
-, zstd
 , nixosTests
+, which
+, xz
+, zlib
+, zstd
 }:
 
 stdenv.mkDerivation rec {
   pname = "squashfs";
-  version = "4.5";
+  version = "4.5.1";
 
   src = fetchFromGitHub {
     owner = "plougher";
     repo = "squashfs-tools";
     rev = version;
-    sha256 = "1nanwz5qvsakxfm37md5i7xqagv69nfik9hpj8qlp6ymw266vgxr";
+    sha256 = "sha256-Y3ZPjeE9HN1F+NtGe6EchYziWrTPVQ4SuKaCvNbXMKI=";
   };
 
   patches = [
     # This patch adds an option to pad filesystems (increasing size) in
     # exchange for better chunking / binary diff calculation.
     ./4k-align.patch
-    # Otherwise sizes of some files may break in our ISO; see
-    # https://github.com/NixOS/nixpkgs/issues/132286
-    (fetchpatch {
-      url = "https://github.com/plougher/squashfs-tools/commit/19b161c1cd3e31f7a396ea92dea4390ad43f27b9.diff";
-      sha256 = "15ng8m2my3a6a9hnfx474bip2vwdh08hzs2k0l5gwd36jv2z1h3f";
-    })
   ] ++ lib.optional stdenv.isDarwin ./darwin.patch;
 
-  buildInputs = [ zlib xz zstd lz4 lzo ];
+  buildInputs = [ zlib xz zstd lz4 lzo which help2man ];
 
   preBuild = ''
     cd squashfs-tools
   '' ;
 
-  installFlags = [ "INSTALL_DIR=${placeholder "out"}/bin" ];
+  installFlags = [
+    "INSTALL_DIR=${placeholder "out"}/bin"
+    "INSTALL_MANPAGES_DIR=${placeholder "out"}/share/man/man1"
+  ];
 
   makeFlags = [
     "XZ_SUPPORT=1"
diff --git a/pkgs/tools/misc/broadlink-cli/default.nix b/pkgs/tools/misc/broadlink-cli/default.nix
index b71b5b444e3..ee99e643b1f 100644
--- a/pkgs/tools/misc/broadlink-cli/default.nix
+++ b/pkgs/tools/misc/broadlink-cli/default.nix
@@ -2,7 +2,7 @@
 
 python3Packages.buildPythonApplication rec {
   pname = "broadlink-cli";
-  version = "0.18.0";
+  version = "0.18.1";
 
   # the tools are available as part of the source distribution from GH but
   # not pypi, so we have to fetch them here.
@@ -10,7 +10,7 @@ python3Packages.buildPythonApplication rec {
     owner  = "mjg59";
     repo   = "python-broadlink";
     rev    = version;
-    sha256 = "0nh9rn1zpc44qsc50360ycg02gwbgq59784mnkp01nhavnwwwx10";
+    sha256 = "sha256-x7RVCu5xOwhUOxXIHP7ZAe1/9F9ecf9RgL9I53e9Mcw=";
   };
 
   format = "other";
diff --git a/pkgs/tools/misc/diffoscope/default.nix b/pkgs/tools/misc/diffoscope/default.nix
index 7f1655b9843..17f57d31ab8 100644
--- a/pkgs/tools/misc/diffoscope/default.nix
+++ b/pkgs/tools/misc/diffoscope/default.nix
@@ -50,7 +50,7 @@ python3Packages.buildPythonApplication rec {
       xz zip zstd
     ]
     ++ (with python3Packages; [
-      argcomplete black debian defusedxml jsondiff jsbeautifier libarchive-c
+      argcomplete debian defusedxml jsondiff jsbeautifier libarchive-c
       python_magic progressbar33 pypdf2 rpm tlsh
     ])
     ++ lib.optionals stdenv.isLinux [ python3Packages.pyxattr acl cdrkit dtc ]
@@ -66,20 +66,19 @@ python3Packages.buildPythonApplication rec {
     installManPage doc/diffoscope.1
   '';
 
-  # Disable flaky test and a failing one
   disabledTests = [
+    # Disable flaky test and a failing one
     "test_android_manifest"
     "test_sbin_added_to_path"
     "test_diff_meta"
     "test_diff_meta2"
     "test_obj_no_differences"
 
-    # Failing because of file-v5.40 has a slightly different output.
-    # Upstream issue: https://salsa.debian.org/reproducible-builds/diffoscope/-/issues/271
-    "test_text_proper_indentation"
-
     # fails because it fails to determine llvm version
     "test_item3_deflate_llvm_bitcode"
+
+    # disable formatting tests because they can break on black updates
+    "test_code_is_black_clean"
   ] ++ lib.optionals stdenv.isDarwin [
     # Disable flaky tests on Darwin
     "test_non_unicode_filename"
diff --git a/pkgs/tools/misc/dua/default.nix b/pkgs/tools/misc/dua/default.nix
index f0984696fe3..d7f3eafe3ec 100644
--- a/pkgs/tools/misc/dua/default.nix
+++ b/pkgs/tools/misc/dua/default.nix
@@ -2,7 +2,7 @@
 
 rustPlatform.buildRustPackage rec {
   pname = "dua";
-  version = "2.17.0";
+  version = "2.17.1";
 
   buildInputs = lib.optionals stdenv.isDarwin [ libiconv Foundation ];
 
@@ -10,7 +10,7 @@ rustPlatform.buildRustPackage rec {
     owner = "Byron";
     repo = "dua-cli";
     rev = "v${version}";
-    sha256 = "sha256-yac/WUVL10JU1V5f9LYh57yYzZ2JMf24jMd8Mun7OMU=";
+    sha256 = "sha256-58l0E5wwRKbF/ja3fmMMBIONjuwVOxlwdKRT5BeO9MQ=";
     # Remove unicode file names which leads to different checksums on HFS+
     # vs. other filesystems because of unicode normalisation.
     extraPostFetch = ''
@@ -18,7 +18,7 @@ rustPlatform.buildRustPackage rec {
     '';
   };
 
-  cargoSha256 = "sha256-Q0ZLMbnQeG/64QvAIPpa3k+lI6dbSSQcdYb5e2rX8U0=";
+  cargoSha256 = "sha256-nft0wrgTMrI8Tav6NcqPwSF8Q367twIOr1voBsW2488=";
 
   doCheck = false;
 
diff --git a/pkgs/tools/misc/hashit/default.nix b/pkgs/tools/misc/hashit/default.nix
index b9bf5f0ae5f..5971939da17 100644
--- a/pkgs/tools/misc/hashit/default.nix
+++ b/pkgs/tools/misc/hashit/default.nix
@@ -24,7 +24,6 @@ stdenv.mkDerivation rec {
   buildInputs = [
     gtk3
     libgee
-    pantheon.elementary-icon-theme
     pantheon.granite
   ];
 
diff --git a/pkgs/tools/misc/onefetch/default.nix b/pkgs/tools/misc/onefetch/default.nix
index 860a9758d4f..d5e77d4461d 100644
--- a/pkgs/tools/misc/onefetch/default.nix
+++ b/pkgs/tools/misc/onefetch/default.nix
@@ -22,8 +22,15 @@ rustPlatform.buildRustPackage rec {
     sha256 = "sha256-16oiZAyj6haBk6mgUT25pPDUrCMd7pGo2kAQ0gTe2kM=";
   };
 
-  # enable pkg-config feature of zstd
-  cargoPatches = [ ./zstd-pkg-config.patch ];
+  cargoPatches = [
+    # enable pkg-config feature of zstd
+    ./zstd-pkg-config.patch
+    # fix flaky test
+    (fetchpatch {
+      url = "https://github.com/o2sh/onefetch/commit/2c1f2f0b2c666f6ce94af0299f88048dd1d83484.patch";
+      sha256 = "sha256-pI3yCFYkqOmLgKnCwexv1LcCrCkhi44zhEAx0szaMkg=";
+    })
+  ];
 
   cargoSha256 = "sha256-6wnfn33mfye5o/vY1JQX1Lc4+jzHiKKgGsSLxeJWyFc=";
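
fetchpatch is used here instead of fetchurl because it normalizes the patch, so its fixed-output hash stays stable even if GitHub regenerates metadata around it. It can also trim a patch before hashing; a sketch of that option, hypothetical and not used by this change:

  (fetchpatch {
    url = "https://github.com/o2sh/onefetch/commit/2c1f2f0b2c666f6ce94af0299f88048dd1d83484.patch";
    # excluding files changes the resulting patch, so the hash would
    # have to be recomputed after adding this
    excludes = [ "*.md" ];
    sha256 = lib.fakeSha256;
  })
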
 
diff --git a/pkgs/tools/misc/opentelemetry-collector/contrib.nix b/pkgs/tools/misc/opentelemetry-collector/contrib.nix
index 57a21a2822d..384c448e1d9 100644
--- a/pkgs/tools/misc/opentelemetry-collector/contrib.nix
+++ b/pkgs/tools/misc/opentelemetry-collector/contrib.nix
@@ -6,17 +6,17 @@
 
 buildGoModule rec {
   pname = "opentelemetry-collector-contrib";
-  version = "0.46.0";
+  version = "0.47.0";
 
   src = fetchFromGitHub {
     owner = "open-telemetry";
     repo = "opentelemetry-collector-contrib";
     rev = "v${version}";
-    sha256 = "sha256-VD/gN9lUwzhRTfr8rAQld+4sN+deYhUlNvCphtZncDU=";
+    sha256 = "sha256-IbpQd01uU6/Ihli+gVDjTB8T8cj//XHoZYcDjXD635Q=";
   };
   # proxy vendor to avoid hash mismatches between Linux and macOS
   proxyVendor = true;
-  vendorSha256 = "sha256-ojNDDPCo6TGp8BYio/pYykXSLjC5Qplw0WFD9UIiYM4=";
+  vendorSha256 = "sha256-1svBCXfutjXfXfVqVHUTAt9T1ON/qbiS+VCt5kP/YIc=";
 
   subPackages = [ "cmd/otelcontribcol" ];
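
With proxyVendor = true the fixed-output goModules derivation stores the Go module download cache rather than the tree produced by go mod vendor, which is what sidesteps the platform-dependent vendoring differences the comment above refers to. A minimal sketch of the knob in isolation (hypothetical package, assuming lib is in scope):

  buildGoModule {
    pname = "example";              # hypothetical
    version = "0.0.0";
    src = ./.;                      # hypothetical source
    proxyVendor = true;             # hash the module cache, not vendor/
    vendorSha256 = lib.fakeSha256;  # placeholder for the first build
  }
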
 
diff --git a/pkgs/tools/misc/steampipe/default.nix b/pkgs/tools/misc/steampipe/default.nix
index cfdb2b79c6d..74fc37800e9 100644
--- a/pkgs/tools/misc/steampipe/default.nix
+++ b/pkgs/tools/misc/steampipe/default.nix
@@ -2,16 +2,16 @@
 
 buildGoModule rec {
   pname = "steampipe";
-  version = "0.13.0";
+  version = "0.13.2";
 
   src = fetchFromGitHub {
     owner = "turbot";
     repo = "steampipe";
     rev = "v${version}";
-    sha256 = "sha256-+QtZmrPE3R98UVSwrC8xoehNKwd0Exg+AZ2BJxBIrfY=";
+    sha256 = "sha256-Ty8yCcxdPa/z1k9xMv8iCybRXfvJKRJIT+oH7MbLWmw=";
   };
 
-  vendorSha256 = "sha256-rRp8pR2cpW88o0KPwuvgSkE263S5oGK/4df4CQSOlRo=";
+  vendorSha256 = "sha256-0jixQcgSXQJAd899EWOUKde5OXZcSZwQfH7LRdQlm7c=";
   proxyVendor = true;
 
   # tests are failing for no obvious reasons
diff --git a/pkgs/tools/networking/corerad/default.nix b/pkgs/tools/networking/corerad/default.nix
index 03962f1e9e2..df6a6315937 100644
--- a/pkgs/tools/networking/corerad/default.nix
+++ b/pkgs/tools/networking/corerad/default.nix
@@ -1,17 +1,17 @@
-{ lib, buildGoModule, fetchFromGitHub, nixosTests }:
+{ lib, buildGo118Module, fetchFromGitHub, nixosTests }:
 
-buildGoModule rec {
+buildGo118Module rec {
   pname = "corerad";
-  version = "1.0.0";
+  version = "1.1.1";
 
   src = fetchFromGitHub {
     owner = "mdlayher";
     repo = "corerad";
     rev = "v${version}";
-    sha256 = "sha256-23f+WJcTf+x9GW+hGUU3/j4Qi9MfcsfQuS7aEU4uGU4=";
+    sha256 = "sha256-2XPWexpr3xGwnvjT08BVq6uf1haPuZGwKswiy/1Z8vE=";
   };
 
-  vendorSha256 = "sha256-SSa+yBZjZ+5vRfzfCtNhF+kRyJ/VMgd9uWqKPwIi8+Y=";
+  vendorSha256 = "sha256-+9KjgbKuAJexdGEKu9hIsHfHsVbKeB5ZtSgFzM2/bOI=";
 
   doCheck = false;
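
Switching the argument from buildGoModule to buildGo118Module pins the Go 1.18 toolchain for this package. An alternative sometimes used in nixpkgs is to keep the generic argument name in the package file and pin the toolchain at the call site instead; a sketch of what that would look like in all-packages.nix (not what this commit does):

  corerad = callPackage ../tools/networking/corerad {
    buildGoModule = buildGo118Module;
  };
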
 
diff --git a/pkgs/tools/networking/openapi-generator-cli/unstable.nix b/pkgs/tools/networking/openapi-generator-cli/unstable.nix
index 33eb33e10b0..9af28b3e09e 100644
--- a/pkgs/tools/networking/openapi-generator-cli/unstable.nix
+++ b/pkgs/tools/networking/openapi-generator-cli/unstable.nix
@@ -1,7 +1,7 @@
 { callPackage, lib, stdenv, fetchurl, jre, makeWrapper }:
 
 let this = stdenv.mkDerivation rec {
-  version = "6.0.0-2021-01-18";  # Also update the fetchurl link
+  version = "6.0.0-2022-03-18";  # Also update the fetchurl link
   pname = "openapi-generator-cli";
 
   jarfilename = "${pname}-${version}.jar";
@@ -11,8 +11,8 @@ let this = stdenv.mkDerivation rec {
   ];
 
   src = fetchurl {
-    url = "https://oss.sonatype.org/content/repositories/snapshots/org/openapitools/openapi-generator-cli/6.0.0-SNAPSHOT/openapi-generator-cli-6.0.0-20210118.082537-4.jar";
-    sha256 = "1ji3yw9dp4srlgqxvb21vrcp2bzj4himxsmp8l8zid9nxsc1m71x";
+    url = "https://oss.sonatype.org/content/repositories/snapshots/org/openapitools/openapi-generator-cli/6.0.0-SNAPSHOT/openapi-generator-cli-6.0.0-20220318.042704-93.jar";
+    sha256 = "1h126kpbnpbrsnjrxb09hzb796dwl4g58d6wrh1hhv8svwy5p0bl";
   };
 
   dontUnpack = true;
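
With dontUnpack = true the prebuilt snapshot jar is used as-is; the install step sits outside this hunk. A sketch of the usual pattern for wrapping such a jar, assuming makeWrapper is among the build inputs:

  installPhase = ''
    runHook preInstall
    install -Dm644 $src $out/share/java/${jarfilename}
    makeWrapper ${jre}/bin/java $out/bin/${pname} \
      --add-flags "-jar $out/share/java/${jarfilename}"
    runHook postInstall
  '';
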
diff --git a/pkgs/tools/networking/tinyssh/default.nix b/pkgs/tools/networking/tinyssh/default.nix
index ddd2d460f73..36518357e60 100644
--- a/pkgs/tools/networking/tinyssh/default.nix
+++ b/pkgs/tools/networking/tinyssh/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "tinyssh";
-  version = "20220305";
+  version = "20220311";
 
   src = fetchFromGitHub {
     owner = "janmojzis";
     repo = "tinyssh";
     rev = version;
-    sha256 = "sha256-d49saN0I22DZixx5AdvQmx3WM7yzQH5lOKnKbzhlls0=";
+    sha256 = "sha256-+lmPPc2UsNtOfuheWEZHAzmKBilNQ3kNh8ixzDnRjRc=";
   };
 
   preConfigure = ''
diff --git a/pkgs/tools/security/minio-certgen/default.nix b/pkgs/tools/security/minio-certgen/default.nix
index ee4413c9041..16dbfdf63ad 100644
--- a/pkgs/tools/security/minio-certgen/default.nix
+++ b/pkgs/tools/security/minio-certgen/default.nix
@@ -2,13 +2,13 @@
 
 buildGoModule rec {
   pname = "minio-certgen";
-  version = "1.1.0";
+  version = "1.2.0";
 
   src = fetchFromGitHub {
     owner = "minio";
     repo = "certgen";
     rev = "v${version}";
-    sha256 = "sha256-Qs+wpx9pRdWdY9FrBaKM8gdB0+POy80I6DB4UaBsJEE=";
+    sha256 = "sha256-FBx4v29ZuhXwubWivIXReO5Ge/rPt1J3LbXlprC7E9c=";
   };
 
   vendorSha256 = null;
diff --git a/pkgs/tools/text/snippetpixie/default.nix b/pkgs/tools/text/snippetpixie/default.nix
index 7102977e8ee..a83135b5024 100644
--- a/pkgs/tools/text/snippetpixie/default.nix
+++ b/pkgs/tools/text/snippetpixie/default.nix
@@ -56,8 +56,6 @@ stdenv.mkDerivation rec {
     json-glib
     xorg.libXtst
     pantheon.granite
-    pantheon.elementary-gtk-theme
-    pantheon.elementary-icon-theme
   ];
 
   doCheck = true;
diff --git a/pkgs/tools/text/vale/default.nix b/pkgs/tools/text/vale/default.nix
index 91ac9ae2e12..9ecd7f44cfa 100644
--- a/pkgs/tools/text/vale/default.nix
+++ b/pkgs/tools/text/vale/default.nix
@@ -2,7 +2,7 @@
 
 buildGoModule rec {
   pname = "vale";
-  version = "2.15.2";
+  version = "2.15.3";
 
   subPackages = [ "cmd/vale" ];
   outputs = [ "out" "data" ];
@@ -11,10 +11,10 @@ buildGoModule rec {
     owner = "errata-ai";
     repo = "vale";
     rev = "v${version}";
-    sha256 = "sha256-4KP4/mfuHTYxOwWqHOa2RNHgElOg6YNAEFmiuF/sHa8=";
+    sha256 = "sha256-vhsn72xCe1wC4YRdo2m49iUj/3nVl0cyfeSmWRfESaY=";
   };
 
-  vendorSha256 = "sha256-2vYe943HHybOLcP8nDJe7RimMRIJdND2UPwtwB2mttE=";
+  vendorSha256 = "sha256-/0H35PGFFPch4jDnQ/ggctyHZJ5W/C1PNlkT5zzvI3M=";
 
   postInstall = ''
     mkdir -p $data/share/vale
diff --git a/pkgs/tools/wayland/swaykbdd/default.nix b/pkgs/tools/wayland/swaykbdd/default.nix
index 4584018596d..e4f480dba8e 100644
--- a/pkgs/tools/wayland/swaykbdd/default.nix
+++ b/pkgs/tools/wayland/swaykbdd/default.nix
@@ -2,13 +2,13 @@
 
 stdenv.mkDerivation rec {
   pname = "swaykbdd";
-  version = "1.0";
+  version = "1.1";
 
   src = fetchFromGitHub {
     owner = "artemsen";
     repo = "swaykbdd";
     rev = "v${version}";
-    sha256 = "101mbf7pvm4ykmkh29g5xswrzdhdwq0rslzxqqfd5ksf1fphzgxd";
+    sha256 = "sha256-umYPVkkYeu6TyVkjDsVBsRZLYh8WyseCPdih85kTz6A=";
   };
 
   nativeBuildInputs = [ meson ninja pkg-config ];
diff --git a/pkgs/top-level/aliases.nix b/pkgs/top-level/aliases.nix
index b68d1b0b27c..1df589661cf 100644
--- a/pkgs/top-level/aliases.nix
+++ b/pkgs/top-level/aliases.nix
@@ -308,6 +308,7 @@ mapAliases ({
   epoxy = libepoxy; # Added 2021-11-11
   esniper = throw "esniper has been removed because upstream no longer maintains it (and it no longer works)"; # Added 2021-04-12
   etcdctl = throw "'etcdctl' has been renamed to/replaced by 'etcd'"; # Converted to throw 2022-02-22
+  eteroj.lv2 = throw "'eteroj.lv2' has been renamed to/replaced by 'open-music-kontrollers.eteroj'"; # Added 2022-03-09
   euca2tools = throw "euca2ools has been removed because it is unmaintained upstream and still uses python2"; # Added 2022-01-01
   evilvte = throw "evilvte has been removed from nixpkgs for being unmaintained with security issues and dependent on an old version of vte which was removed"; # Added 2022-01-14
   evolution_data_server = throw "'evolution_data_server' has been renamed to/replaced by 'evolution-data-server'"; # Converted to throw 2022-02-22
@@ -855,6 +856,7 @@ mapAliases ({
   parity = openethereum; # Added 2020-08-01
   parity-ui = throw "parity-ui was removed because it was broken and unmaintained by upstream"; # Added 2022-01-10
   parquet-cpp = throw "'parquet-cpp' has been renamed to/replaced by 'arrow-cpp'"; # Converted to throw 2022-02-22
+  patchmatrix = throw "'patchmatrix' has been renamed to/replaced by 'open-music-kontrollers.patchmatrix'"; # Added 2022-03-09
   pass-otp = throw "'pass-otp' has been renamed to/replaced by 'pass.withExtensions'"; # Converted to throw 2022-02-22
   pdfmod = throw "pdfmod has been removed"; # Added 2022-01-15
   pdfread = throw "pdfread has been removed because it is unmaintained for years and the sources are no longer available"; # Added 2021-07-22
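
The two added aliases turn the old top-level attribute names into evaluation errors that point at the new open-music-kontrollers scope. A hypothetical configuration migrating across the rename (not part of the commit):

  environment.systemPackages = with pkgs; [
    # eteroj.lv2 and patchmatrix now throw with the rename message;
    # the replacements live under the scoped set:
    open-music-kontrollers.eteroj
    open-music-kontrollers.patchmatrix
  ];
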
diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix
index 6b6b5b424fa..26ffdb57755 100644
--- a/pkgs/top-level/all-packages.nix
+++ b/pkgs/top-level/all-packages.nix
@@ -1041,6 +1041,8 @@ with pkgs;
 
   albert = libsForQt5.callPackage ../applications/misc/albert {};
 
+  allure = callPackage ../development/tools/allure {};
+
   aquosctl = callPackage ../tools/misc/aquosctl { };
 
   arch-install-scripts = callPackage ../tools/misc/arch-install-scripts {};
@@ -3594,6 +3596,8 @@ with pkgs;
 
   mrkd = with python3Packages; toPythonApplication mrkd;
 
+  naproche = callPackage ../applications/science/logic/naproche { };
+
   nautilus-open-any-terminal = callPackage ../tools/misc/nautilus-open-any-terminal { };
 
   n2n = callPackage ../tools/networking/n2n { };
@@ -3743,6 +3747,8 @@ with pkgs;
 
   psrecord = python3Packages.callPackage ../tools/misc/psrecord {};
 
+  rare = python3Packages.callPackage ../games/rare { };
+
   reg = callPackage ../tools/virtualization/reg { };
 
   river = callPackage ../applications/window-managers/river { };
@@ -4160,6 +4166,8 @@ with pkgs;
     stdenv = gcc9Stdenv;
   };
 
+  cider = callPackage ../applications/audio/cider { };
+
   isolyzer = callPackage ../tools/cd-dvd/isolyzer { };
 
   isomd5sum = callPackage ../tools/cd-dvd/isomd5sum { };
@@ -10556,6 +10564,8 @@ with pkgs;
 
   tokio-console = callPackage ../development/tools/tokio-console { };
 
+  toml2json = callPackage ../development/tools/toml2json { };
+
   toml2nix = (callPackage ../tools/toml2nix { }).toml2nix { };
 
   topgrade = callPackage ../tools/misc/topgrade {
@@ -19992,6 +20002,8 @@ with pkgs;
 
   ronn = callPackage ../development/tools/ronn { };
 
+  rover = callPackage ../development/tools/rover { };
+
   rshell = python3.pkgs.callPackage ../development/embedded/rshell { };
 
   rttr = callPackage ../development/libraries/rttr { };
@@ -25504,8 +25516,6 @@ with pkgs;
 
   espeakedit = callPackage ../applications/audio/espeak/edit.nix { };
 
-  eteroj.lv2 = libsForQt5.callPackage ../applications/audio/eteroj.lv2 { };
-
   etebase-server = with python3Packages; toPythonApplication etebase-server;
 
   etesync-dav = callPackage ../applications/misc/etesync-dav {};
@@ -27378,6 +27388,8 @@ with pkgs;
 
   lv2bm = callPackage ../applications/audio/lv2bm { };
 
+  lv2lint = callPackage ../applications/audio/lv2lint { };
+
   lv2-cpp-tools = callPackage ../applications/audio/lv2-cpp-tools { };
 
   lxi-tools = callPackage ../tools/networking/lxi-tools { };
@@ -28188,6 +28200,20 @@ with pkgs;
 
   openjump = callPackage ../applications/misc/openjump { };
 
+  open-music-kontrollers = lib.recurseIntoAttrs {
+    eteroj = callPackage ../applications/audio/open-music-kontrollers/eteroj.nix { };
+    jit = callPackage ../applications/audio/open-music-kontrollers/jit.nix { };
+    mephisto = callPackage ../applications/audio/open-music-kontrollers/mephisto.nix { };
+    midi_matrix = callPackage ../applications/audio/open-music-kontrollers/midi_matrix.nix { };
+    moony = callPackage ../applications/audio/open-music-kontrollers/moony.nix { };
+    orbit = callPackage ../applications/audio/open-music-kontrollers/orbit.nix { };
+    patchmatrix = callPackage ../applications/audio/open-music-kontrollers/patchmatrix.nix { };
+    router = callPackage ../applications/audio/open-music-kontrollers/router.nix { };
+    sherlock = callPackage ../applications/audio/open-music-kontrollers/sherlock.nix { };
+    synthpod = callPackage ../applications/audio/open-music-kontrollers/synthpod.nix { };
+    vm = callPackage ../applications/audio/open-music-kontrollers/vm.nix { };
+  };
+
   openorienteering-mapper = libsForQt5.callPackage ../applications/gis/openorienteering-mapper { };
 
   openscad = libsForQt5.callPackage ../applications/graphics/openscad {};
@@ -28289,8 +28315,6 @@ with pkgs;
 
   capture = callPackage ../tools/misc/capture {};
 
-  patchmatrix = callPackage ../applications/audio/patchmatrix { };
-
   pbrt = callPackage ../applications/graphics/pbrt { };
 
   pcloud = callPackage ../applications/networking/pcloud { };
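
Wrapping the new nested set in lib.recurseIntoAttrs is what lets tools that walk the package tree (Hydra, nix search) descend into it; the helper essentially just tags the set:

  # essentially what lib.recurseIntoAttrs does:
  recurseIntoAttrs = attrs: attrs // { recurseForDerivations = true; };

The individual plugins are then addressed as open-music-kontrollers.<name>, for example nix-build -A open-music-kontrollers.moony.
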
diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix
index 92da9eec9a6..0f4a3a9fba5 100644
--- a/pkgs/top-level/python-packages.nix
+++ b/pkgs/top-level/python-packages.nix
@@ -3601,7 +3601,9 @@ in {
 
   gruut = callPackage ../development/python-modules/gruut { };
 
-  gruut-ipa = callPackage ../development/python-modules/gruut-ipa { };
+  gruut-ipa = callPackage ../development/python-modules/gruut-ipa {
+    inherit (pkgs) espeak;
+  };
 
   gsd = callPackage ../development/python-modules/gsd { };
 
@@ -5157,6 +5159,8 @@ in {
   mizani = callPackage ../development/python-modules/mizani { };
 
   mkdocs = callPackage ../development/python-modules/mkdocs { };
+  mkdocs-material = callPackage ../development/python-modules/mkdocs-material { };
+  mkdocs-material-extensions = callPackage ../development/python-modules/mkdocs-material/mkdocs-material-extensions.nix { };
 
   mkl-service = callPackage ../development/python-modules/mkl-service { };
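
In the gruut-ipa hunk above, inherit (pkgs) espeak hands the top-level espeak package to the module's arguments explicitly rather than relying on whatever the name resolves to inside the Python package set. Because callPackage makes the result overridable, the dependency can be swapped the same way downstream; a hypothetical example, not part of this change:

  gruut-ipa-with-espeak-ng = python3Packages.gruut-ipa.override {
    espeak = pkgs.espeak-ng;
  };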