Refactor user management in Ansible playbooks to standardize on keeper_user

- Updated user addition tasks across multiple playbooks (mcp_switchboard, mcpo, neo4j, neo4j_mcp, openwebui, postgresql, rabbitmq, searxng, smtp4dev) to replace references to ansible_user and remote_user with keeper_user.
- Modified PostgreSQL deployment to create directories and manage files under keeper_user's home.
- Enhanced documentation to clarify account taxonomy and usage of keeper_user in playbooks.
- Introduced new deployment for Agent S, including environment setup, desktop environment installation, XRDP configuration, and accessibility support.
- Added staging playbook for preparing release tarballs from local repositories.
- Created templates for XRDP configuration and environment activation scripts.
- Removed obsolete sunwait documentation.
This commit is contained in:
2026-03-05 10:37:41 +00:00
parent b4d60f2f38
commit 042df52bca
35 changed files with 610 additions and 298 deletions

View File

@@ -3,12 +3,24 @@
hosts: agent_s hosts: agent_s
become: yes become: yes
vars: vars:
system_user: "{{ ansible_user }}" agent_s_venv: "/home/{{principal_user}}/env/agents"
agent_s_venv: "/home/{{ system_user }}/env/agents" agent_s_repo: "/home/{{principal_user}}/gh/Agent-S"
agent_s_repo: "/home/{{ system_user }}/gh/Agent-S"
chrome_deb_url: "https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb" chrome_deb_url: "https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb"
build_dir: "/usr/local/src/pulseaudio-module-xrdp"
tasks: tasks:
# -------------------------------------------------------------------------
# Principal user - AI agent operates on behalf of this human user
# Must exist before any become_user tasks can run
# -------------------------------------------------------------------------
- name: Create principal_user account
user:
name: "{{principal_user}}"
uid: 1000
shell: /bin/bash
create_home: true
state: present
# Disable snap - doesn't work in containers with AppArmor disabled # Disable snap - doesn't work in containers with AppArmor disabled
- name: Prevent snapd from being installed - name: Prevent snapd from being installed
copy: copy:
@@ -68,6 +80,30 @@
- ubuntu-mate-desktop - ubuntu-mate-desktop
state: present state: present
# -------------------------------------------------------------------------
# XRDP - Remote Desktop Protocol server
# -------------------------------------------------------------------------
- name: Install XRDP and xorgxrdp
apt:
name:
- xrdp
- xorgxrdp
state: present
- name: Add xrdp user to ssl-cert group
user:
name: xrdp
groups: ssl-cert
append: yes
- name: Enable and start XRDP service
systemd:
name: xrdp
enabled: yes
state: started
daemon_reload: yes
# AT-SPI Accessibility Stack # AT-SPI Accessibility Stack
- name: Install AT-SPI accessibility infrastructure - name: Install AT-SPI accessibility infrastructure
apt: apt:
@@ -88,31 +124,116 @@
export ACCESSIBILITY_ENABLED=1 export ACCESSIBILITY_ENABLED=1
mode: '0644' mode: '0644'
- name: Configure GPU environment for direct rendering # -------------------------------------------------------------------------
copy: # Sound Support - PulseAudio + module-xrdp for RDP audio redirection
dest: /etc/profile.d/gpu.sh # -------------------------------------------------------------------------
content: |
# Force GPU rendering via AMD render node
export DRI_PRIME=1
export LIBVA_DRIVER_NAME=radeonsi
export MESA_LOADER_DRIVER_OVERRIDE=radeonsi
# Chrome/Chromium GPU flags
export CHROMIUM_FLAGS="--enable-gpu-rasterization --enable-zero-copy --use-gl=egl"
mode: '0644'
# Sound Support - name: Install sound support and build dependencies
- name: Install sound support packages
apt: apt:
name: name:
- git - git
- pulseaudio
- libpulse-dev - libpulse-dev
- autoconf - autoconf
- m4 - m4
- intltool - intltool
- build-essential - build-essential
- dpkg-dev - dpkg-dev
- meson
- ninja-build
state: present state: present
- name: Enable deb-src repositories for PulseAudio source
shell: |
sed -i '/^Types: deb$/s/$/ deb-src/' /etc/apt/sources.list.d/ubuntu.sources 2>/dev/null || \
find /etc/apt/sources.list.d/ -name '*.sources' -exec sed -i '/^Types: deb$/s/$/ deb-src/' {} \;
args:
creates: /usr/local/src/.deb_src_enabled
register: deb_src_result
- name: Mark deb-src as enabled
file:
path: /usr/local/src/.deb_src_enabled
state: touch
mode: '0644'
when: deb_src_result.changed
- name: Update apt cache after enabling deb-src
apt:
update_cache: yes
when: deb_src_result.changed
- name: Install PulseAudio build dependencies
apt:
name: pulseaudio
state: build-dep
when: deb_src_result.changed
- name: Create build directory
file:
path: /usr/local/src
state: directory
mode: '0755'
- name: Download PulseAudio source
shell: |
cd /usr/local/src && apt-get source pulseaudio
args:
creates: /usr/local/src/.pulseaudio_source_downloaded
- name: Find PulseAudio source directory
shell: ls -d /usr/local/src/pulseaudio-[0-9]*/
register: pulse_src_dir
changed_when: false
- name: Mark PulseAudio source as downloaded
file:
path: /usr/local/src/.pulseaudio_source_downloaded
state: touch
mode: '0644'
- name: Generate PulseAudio config.h with meson
shell: meson setup build
args:
chdir: "{{ pulse_src_dir.stdout | trim }}"
creates: "{{ pulse_src_dir.stdout | trim }}/build/config.h"
- name: Create build directory for pulseaudio-module-xrdp
file:
path: "{{ build_dir }}"
state: directory
mode: '0755'
- name: Transfer and extract pulseaudio-module-xrdp source
ansible.builtin.unarchive:
src: "~/rel/pulseaudio_module_xrdp_{{pulseaudio_module_xrdp_rel}}.tar"
dest: "{{ build_dir }}"
- name: Check if module-xrdp-sink is already installed
shell: find /usr/lib/pulse-*/modules/ -name 'module-xrdp-sink.so' 2>/dev/null | head -1
register: xrdp_sink_check
changed_when: false
failed_when: false
- name: Bootstrap pulseaudio-module-xrdp
shell: ./bootstrap
args:
chdir: "{{ build_dir }}"
when: xrdp_sink_check.stdout == ""
- name: Configure pulseaudio-module-xrdp
shell: "./configure PULSE_DIR={{ pulse_src_dir.stdout | trim }}"
args:
chdir: "{{ build_dir }}"
when: xrdp_sink_check.stdout == ""
- name: Build and install pulseaudio-module-xrdp
shell: make && make install
args:
chdir: "{{ build_dir }}"
when: xrdp_sink_check.stdout == ""
notify: restart xrdp
# Mouse, Assistive Technology, and Python # Mouse, Assistive Technology, and Python
- name: Install assistive technology and Python packages - name: Install assistive technology and Python packages
apt: apt:
@@ -133,100 +254,11 @@
- tesseract-ocr - tesseract-ocr
state: present state: present
# GPU Drivers - AMD Mesa (radeonsi/RADV)
- name: Install AMD GPU drivers and utilities
apt:
name:
- mesa-utils
- mesa-utils-extra
- mesa-vulkan-drivers
- vulkan-tools
- libgl1-mesa-dri
- libglx-mesa0
- libglu1-mesa
- libdrm2
- libdrm-amdgpu1
- libegl1
- libegl-mesa0
- libgbm1
- vainfo
- mesa-va-drivers
state: present
# VirtualGL for GPU-accelerated remote rendering
- name: Check if VirtualGL is installed
command: dpkg -s virtualgl
register: virtualgl_check
failed_when: false
changed_when: false
- name: Download VirtualGL
get_url:
url: https://github.com/VirtualGL/virtualgl/releases/download/3.1.2/virtualgl_3.1.2_amd64.deb
dest: /tmp/virtualgl.deb
mode: '0644'
when: virtualgl_check.rc != 0
- name: Install VirtualGL
apt:
deb: /tmp/virtualgl.deb
state: present
when: virtualgl_check.rc != 0
# GPU Permissions - Add user to video and render groups for DRI access
- name: Add user to video group for GPU access
user:
name: "{{ system_user }}"
groups: video
append: yes
- name: Add user to render group for GPU render node access
user:
name: "{{ system_user }}"
groups: render
append: yes
- name: Create udev rules for GPU device permissions
copy:
dest: /etc/udev/rules.d/99-gpu-permissions.rules
content: |
# Allow video group access to DRI devices
SUBSYSTEM=="drm", KERNEL=="card*", MODE="0666"
SUBSYSTEM=="drm", KERNEL=="renderD*", MODE="0666"
mode: '0644'
notify: Reload udev
# Fix GPU permissions on container start (LXC passthrough doesn't honor udev)
- name: Create systemd service to fix GPU permissions on boot
copy:
dest: /etc/systemd/system/fix-gpu-permissions.service
content: |
[Unit]
Description=Fix GPU device permissions for LXC passthrough
After=local-fs.target
[Service]
Type=oneshot
ExecStart=/bin/chmod 666 /dev/dri/card2 /dev/dri/renderD129
RemainAfterExit=yes
[Install]
WantedBy=multi-user.target
mode: '0644'
notify: Reload systemd
- name: Enable GPU permissions fix service
systemd:
name: fix-gpu-permissions
enabled: yes
state: started
daemon_reload: yes
# Create dl directory # Create dl directory
- name: Create download directory - name: Create download directory
become: no become_user: "{{principal_user}}"
file: file:
path: "/home/{{ system_user }}/dl" path: "/home/{{principal_user}}/dl"
state: directory state: directory
mode: '0755' mode: '0755'
@@ -247,53 +279,22 @@
path: /tmp/google-chrome-stable_current_amd64.deb path: /tmp/google-chrome-stable_current_amd64.deb
state: absent state: absent
# Chrome GPU Configuration - Use ANGLE+Vulkan to bypass broken GLX in XRDP
- name: Create Chrome policies directory
file:
path: /etc/opt/chrome/policies/managed
state: directory
mode: '0755'
- name: Configure Chrome GPU policy
copy:
dest: /etc/opt/chrome/policies/managed/gpu-policy.json
content: |
{
"HardwareAccelerationModeEnabled": true
}
mode: '0644'
- name: Create Chrome Vulkan launcher
copy:
dest: /usr/share/applications/google-chrome-vulkan.desktop
content: |
[Desktop Entry]
Version=1.0
Name=Google Chrome (Vulkan)
GenericName=Web Browser
Exec=/usr/bin/google-chrome-stable --ignore-gpu-blocklist --use-gl=angle --use-angle=vulkan --enable-features=Vulkan,DefaultANGLEVulkan,VulkanFromANGLE,CanvasOopRasterization --enable-gpu-rasterization --canvas-oop-rasterization %U
Terminal=false
Icon=google-chrome
Type=Application
Categories=Network;WebBrowser;
mode: '0644'
# Python Virtual Environment Setup # Python Virtual Environment Setup
- name: Create virtual environment directory - name: Create virtual environment directory
become: no become_user: "{{principal_user}}"
file: file:
path: "/home/{{ system_user }}/env" path: "/home/{{principal_user}}/env"
state: directory state: directory
mode: '0755' mode: '0755'
- name: Create Python virtual environment with system site packages - name: Create Python virtual environment with system site packages
become: no become_user: "{{principal_user}}"
command: python3 -m venv --system-site-packages {{ agent_s_venv }} command: python3 -m venv --system-site-packages {{ agent_s_venv }}
args: args:
creates: "{{ agent_s_venv }}/bin/activate" creates: "{{ agent_s_venv }}/bin/activate"
- name: Install Python packages in virtual environment - name: Install Python packages in virtual environment
become: no become_user: "{{principal_user}}"
pip: pip:
name: name:
- lxml - lxml
@@ -304,25 +305,23 @@
# Clone Agent-S Repository # Clone Agent-S Repository
- name: Create gh directory - name: Create gh directory
become: no become_user: "{{principal_user}}"
file: file:
path: "/home/{{ system_user }}/gh" path: "/home/{{principal_user}}/gh/Agent-S"
state: directory state: directory
mode: '0755' mode: '0755'
- name: Clone Agent-S repository - name: Transfer and extract Agent-S
become: no become_user: "{{principal_user}}"
git: ansible.builtin.unarchive:
repo: https://github.com/simular-ai/Agent-S.git src: "~/rel/agent_s_{{agent_s_rel}}.tar"
dest: "{{ agent_s_repo }}" dest: "{{ agent_s_repo }}"
version: main
update: yes
- name: Create environment activation script - name: Create environment activation script
become: no become_user: "{{principal_user}}"
template: template:
src: agent_s_env.j2 src: agent_s_env.j2
dest: "/home/{{ system_user }}/.agent_s_env" dest: "/home/{{principal_user}}/.agent_s_env"
mode: '0644' mode: '0644'
- name: Create XRDP Xorg config directory - name: Create XRDP Xorg config directory
@@ -331,11 +330,19 @@
state: directory state: directory
mode: '0755' mode: '0755'
- name: Deploy XRDP Xorg configuration for 1024x1024 resolution - name: Configure MATE as XRDP session for principal_user
become_user: "{{principal_user}}"
copy:
dest: "/home/{{principal_user}}/.xsession"
content: "exec mate-session\n"
mode: '0755'
- name: Deploy XRDP Xorg configuration for 1024x768 resolution
template: template:
src: xorg.conf.j2 src: xorg.conf.j2
dest: /etc/X11/xrdp/xorg.conf dest: /etc/X11/xrdp/xorg.conf
mode: '0644' mode: '0644'
notify: restart xrdp
handlers: handlers:
- name: Reload systemd - name: Reload systemd
@@ -344,4 +351,9 @@
- name: Reload udev - name: Reload udev
shell: udevadm control --reload-rules && udevadm trigger shell: udevadm control --reload-rules && udevadm trigger
become: yes become: yes
- name: restart xrdp
systemd:
name: xrdp
state: restarted

48
ansible/agent_s/stage.yml Normal file
View File

@@ -0,0 +1,48 @@
---
- name: Stage Agent S and dependencies
hosts: localhost
gather_facts: false
vars:
agent_s_archive: "{{rel_dir}}/agent_s_{{agent_s_rel}}.tar"
agent_s_repo_dir: "{{github_repo_dir}}/Agent-S"
pulse_xrdp_archive: "{{rel_dir}}/pulseaudio_module_xrdp_{{pulseaudio_module_xrdp_rel}}.tar"
pulse_xrdp_repo_dir: "{{github_repo_dir}}/pulseaudio-module-xrdp"
tasks:
- name: Ensure release directory exists
file:
path: "{{rel_dir}}"
state: directory
mode: '755'
# Agent-S
- name: Fetch all remote branches and tags (Agent-S)
ansible.builtin.command: git fetch --all
args:
chdir: "{{agent_s_repo_dir}}"
- name: Pull latest changes (Agent-S)
ansible.builtin.command: git pull
args:
chdir: "{{agent_s_repo_dir}}"
- name: Create Agent-S archive for specified release
ansible.builtin.command: git archive -o "{{agent_s_archive}}" "{{agent_s_rel}}"
args:
chdir: "{{agent_s_repo_dir}}"
# pulseaudio-module-xrdp
- name: Fetch all remote branches and tags (pulseaudio-module-xrdp)
ansible.builtin.command: git fetch --all
args:
chdir: "{{pulse_xrdp_repo_dir}}"
- name: Pull latest changes (pulseaudio-module-xrdp)
ansible.builtin.command: git pull
args:
chdir: "{{pulse_xrdp_repo_dir}}"
- name: Create pulseaudio-module-xrdp archive for specified release
ansible.builtin.command: git archive -o "{{pulse_xrdp_archive}}" "{{pulseaudio_module_xrdp_rel}}"
args:
chdir: "{{pulse_xrdp_repo_dir}}"

View File

@@ -24,6 +24,7 @@ Section "Module"
Load "int10" Load "int10"
Load "record" Load "record"
Load "vbe" Load "vbe"
Load "glamoregl"
Load "xorgxrdp" Load "xorgxrdp"
Load "fb" Load "fb"
EndSection EndSection
@@ -53,9 +54,6 @@ EndSection
Section "Device" Section "Device"
Identifier "Video Card (xrdpdev)" Identifier "Video Card (xrdpdev)"
Driver "xrdpdev" Driver "xrdpdev"
Option "DRMDevice" "/dev/dri/renderD129"
Option "DRI3" "1"
Option "DRMAllowList" "amdgpu"
EndSection EndSection
Section "Screen" Section "Screen"

View File

@@ -2,5 +2,5 @@
inventory = inventory inventory = inventory
stdout_callback = ansible.builtin.default stdout_callback = ansible.builtin.default
result_format = yaml result_format = yaml
remote_user = robert remote_user = ponos
vault_password_file = .vault_pass vault_password_file = .vault_pass

View File

@@ -62,9 +62,9 @@
system: true system: true
shell: /bin/bash shell: /bin/bash
- name: Add remote_user to anythingllm group - name: Add keeper_user to anythingllm group
ansible.builtin.user: ansible.builtin.user:
name: "{{ remote_user }}" name: "{{ keeper_user }}"
groups: "{{ anythingllm_group }}" groups: "{{ anythingllm_group }}"
append: true append: true

View File

@@ -32,10 +32,10 @@
system: true system: true
create_home: false create_home: false
- name: Add ansible user to argos group - name: Add keeper_user to argos group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{ansible_user}}" name: "{{keeper_user}}"
groups: "{{argos_group}}" groups: "{{argos_group}}"
append: true append: true

View File

@@ -21,10 +21,10 @@
system: true system: true
create_home: false create_home: false
- name: Add remote_user to arke group - name: Add keeper_user to arke group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{arke_group}}" groups: "{{arke_group}}"
append: true append: true

View File

@@ -45,10 +45,10 @@
create_home: false create_home: false
shell: /usr/sbin/nologin shell: /usr/sbin/nologin
- name: Add ansible_user to casdoor group - name: Add keeper_user to casdoor group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{ ansible_user }}" name: "{{ keeper_user }}"
groups: "{{ casdoor_group }}" groups: "{{ casdoor_group }}"
append: true append: true

View File

@@ -54,10 +54,10 @@
# Remove User and Group # Remove User and Group
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
- name: Remove ponos from casdoor group - name: Remove keeper_user from casdoor group
become: true become: true
ansible.builtin.command: ansible.builtin.command:
cmd: gpasswd -d ponos {{ casdoor_group }} cmd: gpasswd -d {{ keeper_user }} {{ casdoor_group }}
register: gpasswd_result register: gpasswd_result
changed_when: gpasswd_result.rc == 0 changed_when: gpasswd_result.rc == 0
failed_when: false failed_when: false

View File

@@ -53,10 +53,10 @@
home: "{{ certbot_directory }}" home: "{{ certbot_directory }}"
create_home: false create_home: false
- name: Add ansible user to certbot group - name: Add keeper_user to certbot group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{ ansible_user }}" name: "{{ keeper_user }}"
groups: "{{ certbot_group }}" groups: "{{ certbot_group }}"
append: true append: true

View File

@@ -33,9 +33,9 @@
enabled: true enabled: true
state: started state: started
- name: Add ansible_user to docker group - name: Add keeper_user to docker group
ansible.builtin.user: ansible.builtin.user:
name: "{{ansible_user}}" name: "{{keeper_user}}"
groups: docker groups: docker
append: true append: true

View File

@@ -24,9 +24,9 @@
group: "{{gitea_mcp_group}}" group: "{{gitea_mcp_group}}"
system: true system: true
- name: Add group gitea_mcp to Ansible remote_user - name: Add group gitea_mcp to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{gitea_mcp_group}}" groups: "{{gitea_mcp_group}}"
append: true append: true

View File

@@ -46,9 +46,9 @@
group: "{{grafana_mcp_group}}" group: "{{grafana_mcp_group}}"
system: true system: true
- name: Add group grafana_mcp to Ansible remote_user - name: Add group grafana_mcp to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{grafana_mcp_group}}" groups: "{{grafana_mcp_group}}"
append: true append: true

View File

@@ -26,10 +26,10 @@
uid: "{{haproxy_uid}}" uid: "{{haproxy_uid}}"
system: true system: true
- name: Add group haproxy to ansible_user - name: Add group haproxy to keeper_user
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{ansible_user}}" name: "{{keeper_user}}"
groups: "{{haproxy_group}}" groups: "{{haproxy_group}}"
append: true append: true

View File

@@ -21,10 +21,10 @@
system: true system: true
create_home: false create_home: false
- name: Add group hass to user {{remote_user}} - name: Add group hass to keeper_user
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{hass_group}}" groups: "{{hass_group}}"
append: true append: true

View File

@@ -1,6 +1,15 @@
# Red Panda Approved Sandbox Environment Variables # Account Taxonomy
remote_user: robert # keeper_user - Ansible/Terraform management account (sudo). Use {{ keeper_user }} in playbooks.
remote_group: robert # watcher_user - Non-sudo observation account.
# principal_user - AI agent / human operator account (host-specific, defined in host_vars).
# NOTE: ansible.cfg retains 'remote_user = ponos' as the Ansible SSH built-in keyword.
# Never use {{ remote_user }} or {{ ansible_user }} as Jinja2 variables in playbooks.
keeper_user: ponos
keeper_uid: 519
keeper_group: ponos
keeper_home: /srv/ponos
watcher_user: poros
watcher_uid: 520
deployment_environment: "agathos" deployment_environment: "agathos"
ansible_python_interpreter: /usr/bin/python3 ansible_python_interpreter: /usr/bin/python3

View File

@@ -8,6 +8,11 @@ services:
- docker - docker
- kernos - kernos
# Account Taxonomy
# principal_user is the AI agent operator account on this host
principal_user: robert
principal_uid: 1000
# Alloy # Alloy
alloy_log_level: "warn" alloy_log_level: "warn"

View File

@@ -21,10 +21,10 @@
system: false system: false
create_home: true create_home: true
- name: Add remote_user to kernos group - name: Add keeper_user to kernos group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{kernos_group}}" groups: "{{kernos_group}}"
append: true append: true

View File

@@ -23,10 +23,10 @@
group: "{{lobechat_group}}" group: "{{lobechat_group}}"
system: true system: true
- name: Add group lobechat to user ponos - name: Add group lobechat to keeper_user
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: ponos name: "{{keeper_user}}"
groups: "{{lobechat_group}}" groups: "{{lobechat_group}}"
append: true append: true

View File

@@ -42,10 +42,10 @@
group: "{{loki_group}}" group: "{{loki_group}}"
system: true system: true
- name: Add group loki to ansible_user - name: Add group loki to keeper_user
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{ansible_user}}" name: "{{keeper_user}}"
groups: "{{loki_group}}" groups: "{{loki_group}}"
append: true append: true

View File

@@ -26,10 +26,10 @@
system: true system: true
create_home: false create_home: false
- name: Add ansible_user to mcp_switchboard group - name: Add keeper_user to mcp_switchboard group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{ansible_user}}" name: "{{keeper_user}}"
groups: "{{mcp_switchboard_group}}" groups: "{{mcp_switchboard_group}}"
append: true append: true

View File

@@ -1,8 +1,6 @@
--- ---
- name: Deploy MCPO as a system service - name: Deploy MCPO as a system service
hosts: mcpo hosts: mcpo
vars:
ansible_common_remote_group: ponos
handlers: handlers:
- name: restart mcpo - name: restart mcpo
become: true become: true
@@ -24,10 +22,10 @@
comment: "{{mcpo_user}}" comment: "{{mcpo_user}}"
system: true system: true
- name: Add remote_user to mcpo group - name: Add keeper_user to mcpo group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{mcpo_group}}" groups: "{{mcpo_group}}"
append: true append: true

View File

@@ -24,9 +24,9 @@
group: "{{neo4j_group}}" group: "{{neo4j_group}}"
system: true system: true
- name: Add group neo4j to user ponos - name: Add group neo4j to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: ponos name: "{{keeper_user}}"
groups: "{{neo4j_group}}" groups: "{{neo4j_group}}"
append: true append: true

View File

@@ -24,9 +24,9 @@
group: "{{neo4j_mcp_group}}" group: "{{neo4j_mcp_group}}"
system: true system: true
- name: Add group neo4j_mcp to user ponos - name: Add group neo4j_mcp to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: ponos name: "{{keeper_user}}"
groups: "{{neo4j_mcp_group}}" groups: "{{neo4j_mcp_group}}"
append: true append: true

View File

@@ -20,10 +20,10 @@
comment: "{{openwebui_user}}" comment: "{{openwebui_user}}"
system: true system: true
- name: Add "remote_user" user to OpenWebUI group - name: Add keeper_user to OpenWebUI group
become: true become: true
ansible.builtin.user: ansible.builtin.user:
name: "{{remote_user}}" name: "{{keeper_user}}"
groups: "{{openwebui_group}}" groups: "{{openwebui_group}}"
append: true append: true

View File

@@ -48,31 +48,31 @@
- name: Create gh directory - name: Create gh directory
become: true become: true
ansible.builtin.file: ansible.builtin.file:
path: /home/{{ remote_user }}/gh path: "{{ keeper_home }}/gh"
state: directory state: directory
owner: "{{ remote_user }}" owner: "{{ keeper_user }}"
group: "{{ remote_user }}" group: "{{ keeper_group }}"
mode: '755' mode: '755'
- name: Clone pgvector repository - name: Clone pgvector repository
become: true become: true
become_user: "{{ remote_user }}" become_user: "{{ keeper_user }}"
ansible.builtin.git: ansible.builtin.git:
repo: https://github.com/pgvector/pgvector.git repo: https://github.com/pgvector/pgvector.git
dest: /home/{{ remote_user }}/gh/pgvector dest: "{{ keeper_home }}/gh/pgvector"
version: v0.8.0 version: v0.8.0
force: true force: true
- name: Build pgvector - name: Build pgvector
become: true become: true
become_user: "{{ remote_user }}" become_user: "{{ keeper_user }}"
ansible.builtin.make: ansible.builtin.make:
chdir: /home/{{ remote_user }}/gh/pgvector chdir: "{{ keeper_home }}/gh/pgvector"
- name: Install pgvector - name: Install pgvector
become: true become: true
ansible.builtin.make: ansible.builtin.make:
chdir: /home/{{ remote_user }}/gh/pgvector chdir: "{{ keeper_home }}/gh/pgvector"
target: install target: install
- name: Ensure PostgreSQL is running - name: Ensure PostgreSQL is running
@@ -121,7 +121,7 @@
- name: Build pgvector with correct pg_config - name: Build pgvector with correct pg_config
become: true become: true
ansible.builtin.shell: | ansible.builtin.shell: |
cd /home/{{ remote_user }}/gh/pgvector cd {{ keeper_home }}/gh/pgvector
make clean make clean
# Use the specific pg_config for the installed version # Use the specific pg_config for the installed version
PG_CONFIG_PATH=$(ls /usr/bin/pg_config-* | head -1) PG_CONFIG_PATH=$(ls /usr/bin/pg_config-* | head -1)

View File

@@ -24,9 +24,9 @@
group: "{{rabbitmq_group}}" group: "{{rabbitmq_group}}"
system: true system: true
- name: Add group rabbitmq to user ponos - name: Add group rabbitmq to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: ponos name: "{{keeper_user}}"
groups: "{{rabbitmq_group}}" groups: "{{rabbitmq_group}}"
append: true append: true

View File

@@ -22,9 +22,9 @@
group: "{{searxng_group}}" group: "{{searxng_group}}"
system: true system: true
- name: Add group searxng to ansible_user - name: Add group searxng to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: "{{ansible_user}}" name: "{{keeper_user}}"
groups: "{{searxng_group}}" groups: "{{searxng_group}}"
append: true append: true

View File

@@ -24,9 +24,9 @@
group: "{{smtp4dev_group}}" group: "{{smtp4dev_group}}"
system: true system: true
- name: Add group smtp4dev to user ponos - name: Add group smtp4dev to keeper_user
ansible.builtin.user: ansible.builtin.user:
name: ponos name: "{{keeper_user}}"
groups: "{{smtp4dev_group}}" groups: "{{smtp4dev_group}}"
append: true append: true

245
docs/agent_s.md Normal file
View File

@@ -0,0 +1,245 @@
# Ansible Deployment for Agent S
Agent S is a computer-use automation agent that controls a desktop environment via RDP. The deployment configures a full graphical stack on `larissa.helu.ca`: MATE desktop, XRDP, audio redirection via PulseAudio, and an AT-SPI accessibility bridge so agents can introspect the UI tree.
## Host
| Host | Group | Type |
|------|-------|------|
| `larissa.helu.ca` | `agent_s` | Incus container |
## Overview
The deployment installs and configures:
- **Principal user** (`robert`, UID 1000) — the human account the agent operates on behalf of
- **MATE desktop** — chosen for strong AT-SPI accessibility support
- **Firefox** from the Mozilla APT repo (bypasses the snap dependency introduced by Ubuntu)
- **Google Chrome** for browser automation
- **XRDP** with a custom Xorg config pinned to 1024×768 for UI-TARS / Agent-S model compatibility
- **PulseAudio + pulseaudio-module-xrdp** — audio redirection over RDP
- **AT-SPI accessibility stack** — allows agents to query the widget tree
- **Python virtual environment** with the Agent-S package and dependencies
- **Agent-S repository** extracted from a staged release tarball
- **Environment activation script** at `~/.agent_s_env`
## Prerequisites
### Control node
- Ansible 2.12+
- SSH access to `larissa.helu.ca`
- Staged release tarballs in `~/rel/` (produced by `stage.yml`):
- `~/rel/agent_s_<agent_s_rel>.tar`
- `~/rel/pulseaudio_module_xrdp_<pulseaudio_module_xrdp_rel>.tar`
### Target host
- Ubuntu 25.04
- Network access to Mozilla APT, Google Chrome DL, and the Ubuntu package mirrors
- deb-src repositories available (the playbook enables them if absent)
## Staging
Before deploying, stage release tarballs from local git checkouts:
```bash
ansible-playbook ansible/agent_s/stage.yml
```
`stage.yml` runs on localhost and creates two tarballs from the configured git branches:
| Tarball | Source repo | Branch variable |
|---------|-------------|-----------------|
| `agent_s_<rel>.tar` | `~/gh/Agent-S` | `agent_s_rel` |
| `pulseaudio_module_xrdp_<rel>.tar` | `~/gh/pulseaudio-module-xrdp` | `pulseaudio_module_xrdp_rel` |
Both variables default to the `all` group vars (`agent_s_rel: main`, `pulseaudio_module_xrdp_rel: devel`).
## Deployment
```bash
ansible-playbook ansible/agent_s/deploy.yml
```
### Phase 1 — Principal user and snap suppression
Creates the `robert` account (UID 1000) and pins `snapd` at priority -10 via `/etc/apt/preferences.d/nosnap.pref`. This must run before the desktop install so `ubuntu-mate-desktop` cannot pull in snap-packaged Firefox.
### Phase 2 — Firefox
Adds the Mozilla APT repo and pins all `packages.mozilla.org` packages at priority 1000, ensuring the system installs the .deb Firefox rather than the snap.
### Phase 3 — MATE desktop
Installs `ubuntu-mate-desktop`. MATE is preferred over GNOME because its AT-SPI bridge is more reliable in a headless XRDP session.
### Phase 4 — XRDP
Installs `xrdp` and `xorgxrdp`, adds the `xrdp` user to `ssl-cert`, and enables the service. The Xorg configuration is deployed separately (Phase 8).
### Phase 5 — AT-SPI accessibility
Installs the AT-SPI core libraries and adds `/etc/profile.d/atspi.sh` to set:
```bash
GTK_MODULES=gail:atk-bridge
NO_AT_BRIDGE=0
ACCESSIBILITY_ENABLED=1
```
These environment variables are picked up by MATE applications when launched from `.xsession`, making the widget tree available to AT-SPI consumers such as `pyatspi`.
### Phase 6 — PulseAudio and RDP audio
See [Sound device configuration](#sound-device-configuration) below.
### Phase 7 — Packages, Chrome, Python environment, Agent-S
- Installs OCR support (`tesseract-ocr`), Python 3, and assistive tech libraries
- Downloads and installs Google Chrome from the official .deb
- Creates a Python venv at `~/env/agents` with `--system-site-packages` (so `pyatspi` and `python3-gi`, installed system-wide, are available)
- Extracts Agent-S into `~/gh/Agent-S`
### Phase 8 — XRDP Xorg configuration and session
Deploys `xorg.conf.j2` to `/etc/X11/xrdp/xorg.conf` and writes `.xsession` to start MATE:
```
exec mate-session
```
Any change to the Xorg config triggers the `restart xrdp` handler.
---
## X Server / RDP configuration
The Xorg config is templated from `ansible/agent_s/xorg.conf.j2` and deployed to `/etc/X11/xrdp/xorg.conf`.
### Resolution
The display is pinned to **1024×768**. UI-TARS and similar vision-language models are trained on screenshots at this resolution; higher resolutions degrade accuracy. Fallback modelines for 800×600 and 640×480 satisfy the `xrdpdev` driver's internal requirements but are never selected in normal operation.
### Module loading order
```
Load "glamoregl" ← must precede xorgxrdp
Load "xorgxrdp"
```
`xorgxrdp 0.10.2` (shipped in Ubuntu 25.04) depends on the symbol `glamor_xv_init`, which lives in `libglamoregl.so`. If `glamoregl` is not loaded first, `libxorgxrdp.so` fails with:
```
undefined symbol: glamor_xv_init
```
This cascades — `xrdpdev`, `xrdpmouse`, and `xrdpkeyb` all fail because they depend on symbols exported by `libxorgxrdp.so`.
### Device section
The Device section uses only the `xrdpdev` virtual framebuffer driver with no DRM/GPU options:
```
Section "Device"
Identifier "Video Card (xrdpdev)"
Driver "xrdpdev"
EndSection
```
DRM options (`DRMDevice`, `DRI3`, `DRMAllowList`) are not applicable to the virtual framebuffer and were the source of a previous misconfiguration on `larissa`.
### Display variable
The agent environment sets `DISPLAY=:10.0` (via `~/.agent_s_env`). XRDP assigns display numbers starting at `:10` by default.
---
## Sound device configuration
RDP audio redirection requires two components:
1. **PulseAudio** — the userspace sound server
2. **pulseaudio-module-xrdp** — a PulseAudio sink/source module that forwards audio to the RDP client
### Build process
`pulseaudio-module-xrdp` must be compiled against the PulseAudio headers matching the exact version running on the target host. The playbook automates this:
1. Enables `deb-src` entries in `/etc/apt/sources.list.d/ubuntu.sources`
2. Runs `apt-get source pulseaudio` into `/usr/local/src/`
3. Generates `config.h` by running `meson setup build` in the PulseAudio source tree
4. Extracts `pulseaudio-module-xrdp` from the staged tarball into `/usr/local/src/pulseaudio-module-xrdp/`
5. Runs `./bootstrap && ./configure PULSE_DIR=<pulse-src> && make && make install`
Steps 4–5 are skipped if `module-xrdp-sink.so` is already present under `/usr/lib/pulse-*/modules/`. Re-running the playbook after a PulseAudio upgrade will trigger a rebuild because the old `.so` won't be found at the new versioned path.
### How audio flows
```
MATE application
→ PulseAudio (userspace daemon, per-user session)
→ module-xrdp-sink (installed by pulseaudio-module-xrdp)
→ XRDP audio channel
→ RDP client
```
PulseAudio is started automatically as part of the MATE session (`mate-session` launches `pulseaudio --start`). No additional service unit is required.
---
## Variables
| Variable | Default | Description |
|----------|---------|-------------|
| `principal_user` | `robert` | Username for the human account the agent runs as |
| `agent_s_rel` | `main` | Git branch/tag to stage from `~/gh/Agent-S` |
| `pulseaudio_module_xrdp_rel` | `devel` | Git branch/tag to stage from `~/gh/pulseaudio-module-xrdp` |
| `agent_s_venv` | `/home/{{ principal_user }}/env/agents` | Path to the Python virtual environment |
| `agent_s_repo` | `/home/{{ principal_user }}/gh/Agent-S` | Extraction path for the Agent-S source |
All variables are set in `ansible/inventory/host_vars/larissa.helu.ca.yml` (`principal_user`) and `ansible/inventory/group_vars/all/vars.yml` (release branches).
---
## Environment activation
After deployment, activate the Agent-S environment on the target host:
```bash
source ~/.agent_s_env
```
This activates the virtual environment, sets `AGENT_S_HOME`, `DISPLAY`, and exports `HF_TOKEN` and `OPENAI_API_KEY` placeholder values that must be filled in for model inference.
---
## Troubleshooting
### X server fails to start — `undefined symbol: glamor_xv_init`
`glamoregl` is missing or ordered after `xorgxrdp` in the Module section. Check `/etc/X11/xrdp/xorg.conf` and ensure `Load "glamoregl"` appears before `Load "xorgxrdp"`.
### Black screen on RDP connect
Confirm `.xsession` contains `exec mate-session` and is executable (`chmod 0755`). Check `/var/log/xrdp-sesman.log` for session startup errors.
### No audio in RDP session
Verify `module-xrdp-sink.so` is installed:
```bash
find /usr/lib/pulse-*/modules/ -name 'module-xrdp-sink.so'
```
If absent, re-run the playbook. If PulseAudio was upgraded, the old `.so` path will not exist and the build steps will execute automatically.
### Accessibility / AT-SPI not working
Confirm the profile script is loaded in the session:
```bash
echo $GTK_MODULES # should include atk-bridge
```
If empty, verify `/etc/profile.d/atspi.sh` exists and the session was started via `.xsession` (not a display manager session that bypasses `/etc/profile.d/`).

View File

@@ -522,6 +522,69 @@ ansible-playbook myapp/deploy.yml
- [Ansible Vault Guide](https://docs.ansible.com/ansible/latest/vault_guide/index.html) - [Ansible Vault Guide](https://docs.ansible.com/ansible/latest/vault_guide/index.html)
- [Inventory Organization](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html) - [Inventory Organization](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html)
## Account Taxonomy
Standardized account roles used across Ansible and Terraform. This taxonomy eliminates confusion between Ansible reserved connection keywords (`remote_user` in `ansible.cfg`) and infrastructure-managed account variables in playbooks.
| Role | Variable | Example | Home | Sudo | Purpose |
|------|----------|---------|------|------|---------|
| user | *(login name)* | robert:1000 | /home/robert | varies | Human user account |
| service_user | `{service}_user` | arke:500 | /srv/arke | no | Service daemon account |
| keeper_user | `keeper_user` | ponos:519 | /srv/ponos | yes | Ansible/Terraform management (sudo) |
| watcher_user | `watcher_user` | poros:520 | — | no | Non-sudo observation account |
| principal_user | `principal_user` | robert:1000 | /home/robert | varies | AI agent collaborative account |
### Key Rules
- **`keeper_user`** replaces all uses of `{{ ansible_user }}` and `{{ remote_user }}` as Jinja2 variables in playbooks
- **`ansible.cfg`** retains `remote_user = ponos` as the SSH connection keyword (Ansible built-in) — this is not a Jinja2 variable
- **`service_user`** accounts live in `/srv/{service}` — if currently in `/home`, they migrate on next re-provision
- **`watcher_user`** is provisioned by Ansible playbook when needed (not via cloud-init)
- **`principal_user`** is for AI agent hosts where the agent operates on behalf of a human user; define in `host_vars/{hostname}.yml`
- Do **not** use `vault_` prefix for any of these — that prefix is reserved for Ansible Vault variables
### Variable Definitions
All taxonomy variables are defined in `inventory/group_vars/all/vars.yml`:
```yaml
# Account Taxonomy
keeper_user: ponos
keeper_uid: 519
keeper_group: ponos
keeper_home: /srv/ponos
watcher_user: poros
watcher_uid: 520
```
`principal_user` is host-specific and defined in the relevant `host_vars` file:
```yaml
# inventory/host_vars/caliban.incus.yml
principal_user: robert
principal_uid: 1000
```
### Bootstrap Chain
1. **Terraform** provisions `ponos` (keeper_user) on all containers via `cloud-init`
- UID 519, home `/srv/ponos`, sudoers, SSH authorized keys at `/srv/ponos/.ssh/authorized_keys`
2. **`ansible.cfg`** sets `remote_user = ponos` so all Ansible connections use the keeper account
3. **Playbooks** reference `{{ keeper_user }}` for any task that needs the management account name
### Playbook Pattern
```yaml
- name: Add keeper_user to service group
become: true
ansible.builtin.user:
name: "{{ keeper_user }}"
groups: "{{ service_group }}"
append: true
```
**Never use** `{{ ansible_user }}` or `{{ remote_user }}` as Jinja2 template variables in tasks — these shadow Ansible built-in connection variables and cause unpredictable behaviour.
## Secret Management Patterns ## Secret Management Patterns
### Ansible Vault (Sandbox Environment) ### Ansible Vault (Sandbox Environment)

View File

@@ -1,70 +0,0 @@
Calculate sunrise and sunset times for the current or targeted day.
The times can be adjusted either for twilight or fixed durations.
The program can either: wait for sunrise or sunset (function: wait),
or return the time (GMT or local) the event occurs (function: list),
or report the day length and twilight timings (function: report),
or simply report if it is DAY or NIGHT (function: poll).
You should specify the latitude and longitude of your target location.
Usage: sunwait [major options] [minor options] [twilight type] [rise|set] [offset] [latitude] [longitude]
Major options, either:
poll Returns immediately indicating DAY or NIGHT. See 'program exit codes'. Default.
wait Sleep until specified event occurs. Else exit immediate.
list [X] Report twilight times for next 'X' days (inclusive). Default: 1.
report [date] Generate a report about the day's sunrise and sunset timings. Default: the current day
Minor options, any of:
[no]debug Print extra info and returns in one minute. Default: nodebug.
[no]version Print the version number. Default: noversion.
[no]help Print this help. Default: nohelp.
[no]gmt Print times in GMT or local-time. Default: nogmt.
Twilight types, either:
daylight Top of sun just below the horizon. Default.
civil Civil Twilight. -6 degrees below horizon.
nautical Nautical twilight. -12 degrees below horizon.
astronomical Astronomical twilight. -18 degrees below horizon.
angle [X.XX] User-specified twilight-angle (degrees). Default: 0.
Sunrise/sunset. Only useful with major-options: 'wait' and 'list'. Any of: (default: both)
rise Wait for the sun to rise past specified twilight & offset.
set Wait for the sun to set past specified twilight & offset.
Offset:
offset [MM|HH:MM] Time interval (+ve towards noon) to adjust twilight calculation.
Target date. Only useful with major-options: 'report' or 'list'. Default: today
d [DD] Set the target Day-of-Month to calculate for. 1 to 31.
m [MM] Set the target Month to calculate for. 1 to 12.
y [YYYY] Set the target Year to calculate for. 2000 to 2099.
latitude/longitude coordinates: floating-point degrees, with [NESW] appended. Default: Bingham, England.
Exit (return) codes:
0 OK: exit from 'wait' or 'list' only.
1 Error.
2 Exit from 'poll': it is DAY or twilight.
3 Exit from 'poll': it is NIGHT (after twilight).
Example 1: sunwait wait rise offset -1:15:10 51.477932N 0.000000E
Wait until 1 hour 15 minutes 10 secs before the sun rises in Greenwich, London.
Example 2: sunwait list 7 civil 55.752163N 37.617524E
List civil sunrise and sunset times for today and next 6 days. Moscow.
Example 3: sunwait poll exit angle 10 54.897786N -1.517536E
Indicate by program exit-code if is Day or Night using a custom twilight angle of 10 degrees above horizon. Washington, UK.
Example 4: sunwait list 7 gmt sunrise angle 3
List next 7 days sunrise times, custom +3 degree twilight angle, default location.
Uses GMT; as any change in daylight saving over the specified period is not considered.
Example 5: sunwait report y 20 m 3 d 15 10.49S 105.55E
Produce a report of the different sunrises and sunsets on an arbitrary day (2022/03/15) for an arbitrary location (Christmas Island)
Note that the program uses C library functions to determine time and localtime.
Errors for timings are estimated at: +/- 4 minutes.

View File

@@ -7,9 +7,9 @@ packages:
- apt-utils - apt-utils
- openssh-server - openssh-server
users: users:
- name: ${var.system_user} - name: ${var.keeper_user}
uid: ${var.user_uid} uid: ${var.keeper_uid}
system: true homedir: /srv/${var.keeper_user}
sudo: ALL=(ALL) NOPASSWD:ALL sudo: ALL=(ALL) NOPASSWD:ALL
shell: /bin/bash shell: /bin/bash
groups: sudo groups: sudo
@@ -271,11 +271,15 @@ resource "incus_instance" "uranian_hosts" {
) )
file { file {
target_path = "/home/${var.system_user}/.ssh/authorized_keys" target_path = "/srv/${var.keeper_user}/.ssh/authorized_keys"
source_path = var.ssh_key_path source_path = var.ssh_key_path
uid = var.user_uid uid = var.keeper_uid
gid = var.user_uid gid = var.keeper_uid
mode = 0750 mode = 0750
create_directories = true create_directories = true
} }
lifecycle {
ignore_changes = [config]
}
} }

View File

@@ -22,16 +22,16 @@ variable "storage_pool" {
default = "default" default = "default"
} }
variable "system_user" { variable "keeper_user" {
description = "System user name for sandbox hosts" description = "Keeper user name for container management (keeper_user taxonomy)"
type = string type = string
default = "robert" default = "ponos"
} }
variable "user_uid" { variable "keeper_uid" {
description = "System user UID" description = "Keeper user UID"
type = number type = number
default = 1000 default = 519
} }
variable "ssh_key_path" { variable "ssh_key_path" {