NixOS / nixpkgs

Nix Packages collection & NixOS
MIT License
18.39k stars 14.34k forks source link

Add a system service for ssh-tpm-agent #353096

Open arunoruto opened 1 month ago

arunoruto commented 1 month ago

Describe the bug

The ssh-tpm-agent is able to install services onto the target system to generate host keys and enable a socket for communication. I was wondering if the same could also be done in NixOS, by having a services.ssh-tpm-agent option which can be enabled. I tried to replicate the services ssh-tpm-genkeys.service, ssh-tpm-agent.service, and the socket ssh-tpm-agent.socket, together with a modification to sshd to enable the generated keys:

{
  config,
  lib,
  pkgs,
  ...
}:
let
  cfg = config.services.ssh-tpm-agent;
  # Host-wide agent socket.  ssh-tpm-agent has this path hard coded, so it
  # cannot be moved to /run/ssh-tpm-agent/ without patching the package or
  # overriding it at build time via -ldflags "-X ...".
  socket = "/var/tmp/ssh-tpm-agent.sock";
in
{
  options.services.ssh-tpm-agent = {
    # mkEnableOption is the idiomatic shorthand for a default-false boolean
    # enable flag (same type and default as the hand-rolled mkOption).
    enable = lib.mkEnableOption "the SSH TPM agent";

    package = lib.mkPackageOption pkgs "ssh-tpm-agent" { };
  };

  config = lib.mkIf cfg.enable {
    # The agent needs the TPM 2.0 userspace stack.
    security.tpm2 = {
      enable = lib.mkDefault true;
      pkcs11.enable = lib.mkDefault true;
    };

    environment.systemPackages = [ cfg.package ];

    systemd = {
      packages = [ cfg.package ];

      # ---- System-level units ------------------------------------------
      services = {
        # One-shot job that creates the TPM-backed host keys.
        ssh-tpm-genkeys = {
          # `description` already generates Unit.Description; the original
          # `rec` block set the same value twice via unitConfig.Description.
          description = "SSH TPM Key Generation";
          unitConfig = {
            # Run only when at least one key file is missing:
            # "|" = trigger when ANY listed condition holds,
            # "!" = condition holds when the path does NOT exist.
            ConditionPathExists = [
              "|!/etc/ssh/ssh_tpm_host_ecdsa_key.tpm"
              "|!/etc/ssh/ssh_tpm_host_ecdsa_key.pub"
              "|!/etc/ssh/ssh_tpm_host_rsa_key.tpm"
              "|!/etc/ssh/ssh_tpm_host_rsa_key.pub"
            ];
          };
          serviceConfig = {
            Type = "oneshot";
            # -A: generate all default host key types (like ssh-keygen -A).
            ExecStart = "${cfg.package}/bin/ssh-tpm-keygen -A";
            RemainAfterExit = "yes";
          };
        };

        # The host agent that serves key operations to sshd (HostKeyAgent).
        ssh-tpm-agent = {
          description = "ssh-tpm-agent service";
          documentation = [
            "man:ssh-agent(1)"
            "man:ssh-add(1)"
            "man:ssh(1)"
          ];
          # Keys must exist before the agent starts; Wants = best effort,
          # After = ordering only.
          wants = [ "ssh-tpm-genkeys.service" ];
          after = [
            "ssh-tpm-genkeys.service"
            "network.target"
            # NOTE(review): NixOS ships "sshd.service", not "sshd.target";
            # ordering after a nonexistent unit is a silent no-op — confirm
            # whether sshd.service was intended here.
            "sshd.target"
          ];
          requires = [ "ssh-tpm-agent.socket" ];
          unitConfig = {
            # Skip when a regular ssh-agent is already active.
            ConditionEnvironment = "!SSH_AGENT_PID";
          };

          serviceConfig = {
            ExecStart = "${cfg.package}/bin/ssh-tpm-agent --key-dir /etc/ssh";
            PassEnvironment = "SSH_AGENT_PID";
            KillMode = "process";
            Restart = "always";
          };

          wantedBy = [ "multi-user.target" ];
        };
      };

      sockets = {
        ssh-tpm-agent = {
          description = "SSH TPM agent socket";
          documentation = [
            "man:ssh-agent(1)"
            "man:ssh-add(1)"
            "man:ssh(1)"
          ];

          socketConfig = {
            ListenStream = socket;
            SocketMode = "0600";
            Service = "ssh-tpm-agent.service";
          };

          wantedBy = [ "sockets.target" ];
        };
      };

      # ---- Per-user units ----------------------------------------------
      user = {
        services.ssh-tpm-agent = {
          description = "ssh-tpm-agent service";
          documentation = [
            "man:ssh-agent(1)"
            "man:ssh-add(1)"
            "man:ssh(1)"
          ];
          requires = [ "ssh-tpm-agent.socket" ];
          unitConfig = {
            ConditionEnvironment = "!SSH_AGENT_PID";
          };

          serviceConfig = {
            # %t expands to $XDG_RUNTIME_DIR in user units.
            Environment = "SSH_AUTH_SOCK=%t/ssh-tpm-agent.sock";
            ExecStart = "${cfg.package}/bin/ssh-tpm-agent";
            PassEnvironment = "SSH_AGENT_PID";
            SuccessExitStatus = "2";
            Type = "simple";
          };

          wantedBy = [ "default.target" ];
        };

        sockets.ssh-tpm-agent = {
          description = "SSH TPM agent socket";
          documentation = [
            "man:ssh-agent(1)"
            "man:ssh-add(1)"
            "man:ssh(1)"
          ];

          socketConfig = {
            ListenStream = "%t/ssh-tpm-agent.sock";
            SocketMode = "0600";
            Service = "ssh-tpm-agent.service";
          };

          wantedBy = [ "sockets.target" ];
        };
      };
    };

    # With HostKeyAgent set, sshd asks the agent for private-key operations
    # and the HostKey directives point at the *public* key files.
    services.openssh.extraConfig = ''
      HostKeyAgent ${socket}
      HostKey /etc/ssh/ssh_tpm_host_ecdsa_key.pub
      HostKey /etc/ssh/ssh_tpm_host_rsa_key.pub
    '';
  };
}

The services are also running without any issue, but when I run sudo ssh-tpm-hostkeys I get a 2024/11/02 12:36:56 dial unix /var/tmp/ssh-tpm-agent.sock: connect: connection refused.

EDIT: added a config for the potential service, inspired by the yubikey-agent, and added a user service and socket. The only deviation is that I cannot replicate the Also=ssh-agent.socket in the user ssh-tpm-agent.service, so I opted for a wantedBy = [ "default.target" ]. The only "issue" currently present is moving the socket to a different location.

Steps To Reproduce

Steps to reproduce the behavior:

  1. Add the aforementioned config to your setup
  2. Build your system
  3. Check that all services have started
  4. Run sudo ssh-tpm-hostkeys. It was a typo in the socket name...

Expected behavior

A list of keys found on the host should be displayed

Additional context

Currently the socket is expected to be at /var/tmp/ssh-tpm-agent.sock. It would be nice to be able to move it to /run/ssh-tpm-agent/socket, so it follows a more similar schema like the other sockets. This would need a modification to the source code, since the path is hard coded.

Notify maintainers

@stigtsp, Could you look over the system config and tell me if I did something wrong and if it would make sense to move that to a services.ssh-tpm-agent?

Metadata

Add a :+1: reaction to issues you find important.

terrorbyte commented 3 weeks ago

Currently the socket is expected to be at /var/tmp/ssh-tpm-agent.sock.

It is probably possible to override that hardcoded path with a -ldflags="-X 'socket=/run/ssh-tpm-agent/socket'" build-time linker flag without doing a source modification, but that will also be a change to the package itself.

arunoruto commented 3 weeks ago

Currently the socket is expected to be at /var/tmp/ssh-tpm-agent.sock.

It is probably possible to override that hardcoded path with a -ldflags="-X 'socket=/run/ssh-tpm-agent/socket'" build-time linker flag without doing a source modification, but that will also be a change to the package itself.

Thanks for the hint! I guess that could be changed for the ssh-tpm-agent package only for Nix. Currently I am running my setup from above, but if more people are interested, I could make a PR out of it :)

stigtsp commented 1 week ago

Hi @arunoruto !

This looks pretty cool, I'd be interested in reviewing a PR for a NixOS module for ssh-tpm-agent.

Some thoughts:

arunoruto commented 1 week ago

This looks pretty cool, I'd be interested in reviewing a PR for a NixOS module for ssh-tpm-agent.

Awesome! I will see if I can make a PR in a few hours, when I am back home :)

  • It would be great to focus on one use case first, maybe first the common usecase where a user wants to use it as an ssh-agent for signing and logging in via ssh similar to programs.ssh.startAgent

Fair enough. We can then build upon the initial idea with further PRs and feedback. Would you strip off something from the example config above? Or do you just mean my point about altering the socket location?

  • Consider that ssh-tpm-agent has a pretty cool agent proxy feature, I'm not quite sure how to combine that with other agents like seahorse, ssh-agent and gnupg.

I have something like this in my current config implemented. I would also propose to include the hostSocket path if someone decides to place the socket somewhere else and the userProxyPath, so the proxy feature can be used!

stigtsp commented 1 week ago

We can then build upon the initial idea with further PRs and feedback. Would you strip off something from the example config above? Or do you just mean my point about altering the socket location?

Your config is likely a good starting point. I also think we should alter the socket location, maybe via SSH_AUTH_SOCK in a systemd user unit, so we're not dependent on patching or upstream changes.

It would also be great to have some NixOS tests for this as well. The test VMs can run a vTPM, so would be great to use those.

I would also propose to include the hostSocket path if someone decides to place the socket somewhere else and the userProxyPath, so the proxy feature can be used!

Sounds good to me.

arunoruto commented 1 week ago

I made a draft with my config as a baseline with some minor alterations. Any feedback is appreciated!