Compare commits

...

111 Commits

Author SHA1 Message Date
29a4bbf3ba remove notes from site.yaml 2026-02-20 21:19:19 -07:00
a69d0448e2 Move notes to webserver role, add new webserver, add dns entries 2026-02-20 21:16:48 -07:00
a44d10810b bump versions in compose 2026-02-17 21:57:47 -07:00
14cbd714ce add vars support for pangolin updates 2026-02-17 21:44:28 -07:00
505306aebb new default makes sense here 2026-02-06 22:12:46 -07:00
f4176e9f7e notes static site plz 2026-02-06 22:05:48 -07:00
9ac57684f0 add static site support 2026-02-06 22:05:24 -07:00
25384e2946 one more copy 2026-02-06 21:51:38 -07:00
502e0188ed I refactored the lego stuff before and did not fix this 2026-02-06 21:45:55 -07:00
6e924dbe89 i said ssl 2026-02-06 21:35:10 -07:00
f06366ba5e Openssl helps 2026-02-06 21:27:25 -07:00
049b59c880 we do ssl even for noteS 2026-02-06 21:14:27 -07:00
9f4aae6a2e add web root to nginx 2026-02-03 21:49:07 -07:00
1cda8d25b7 add new internal host 2026-02-03 21:44:35 -07:00
e7f6648e4a init non ssl vhost for nginx, add to notes role 2026-02-03 21:34:24 -07:00
21618d2e5e init notes web host 2026-02-02 22:47:15 -07:00
f97da59a1e we want to expose on all interfaces for now 2026-01-27 18:05:21 -07:00
04e6bb24df Missed one 2026-01-27 17:53:25 -07:00
7d3e1fc47f add ports for syncthing - lock down later for home subs 2026-01-27 17:49:42 -07:00
b8b3e6e577 init syncthing role 2026-01-26 23:12:48 -07:00
5c4fe4f2bd always forget site.yaml add 2026-01-26 23:01:48 -07:00
68428bc451 init syncthing host and docker role 2026-01-26 23:01:02 -07:00
0397917b55 nonsense opencloud 2026-01-26 22:39:16 -07:00
c893f3266e bump gerbil version? maybe? 2026-01-26 22:29:38 -07:00
df0cc5fe84 actually need cnames 2026-01-26 19:56:32 -07:00
d810148c14 Revert "add new DNS entries to test opencloud"
This reverts commit 82b88994c8.
2026-01-26 19:55:06 -07:00
82b88994c8 add new DNS entries to test opencloud 2026-01-26 19:49:29 -07:00
d4e008573e add irc packages for irc box to migrate irc to the not cloud 2026-01-26 16:39:39 -07:00
359bfd3672 make git use the ldaps 2026-01-26 16:21:37 -07:00
0854afa1e9 we explicitly need this to be false for now 2026-01-26 16:18:15 -07:00
bc66483ff7 always be missing quotes 2026-01-26 16:13:10 -07:00
3213671967 add olm to gitea 2026-01-26 16:11:34 -07:00
837c02be5f add daemon reload prpoer logic 2026-01-26 16:11:07 -07:00
9a6185846a init olm role 2026-01-26 16:07:22 -07:00
11118c2efa add olm version and checksum 2026-01-26 11:29:09 -07:00
a602bf4bf4 update pangolin version - should be var in future 2026-01-26 01:48:18 -07:00
3b768aa779 update monitoring host root password 2026-01-26 01:27:09 -07:00
48d48ef7b7 Update root password for irc box 2026-01-26 01:25:43 -07:00
b776bc0d35 helps to have sssd 2026-01-26 01:18:37 -07:00
af45f60620 init irc host 2026-01-26 01:03:39 -07:00
084f23ae1e add second ssh key 2026-01-26 00:59:31 -07:00
6072139f60 add new dns entries for irc box 2026-01-26 00:54:44 -07:00
adf714d45c something is wonk here 2026-01-26 00:29:53 -07:00
df2aa04ec6 why is this making many lines please stop 2026-01-26 00:19:53 -07:00
b5d9f37bb7 add ssh authorized keys command 2026-01-26 00:07:45 -07:00
118ee411c7 add ldap_client to monitoring box 2026-01-25 23:58:14 -07:00
f7694cd28b init ldap_client 2026-01-25 23:57:27 -07:00
279c1699e2 remove ldaps since we use starttls on 389 2026-01-25 23:34:43 -07:00
7c3d702ffa derp docs 2026-01-25 22:30:26 -07:00
5a1e0bf333 missing defaults 2026-01-25 22:24:47 -07:00
107fcf3cf0 revert acl changes 2026-01-25 20:59:35 -07:00
4cd202bfe3 one more 2026-01-25 20:43:18 -07:00
86ea89be71 maybe one more 2026-01-25 20:29:01 -07:00
dd8e764b5c oh we had a line for that 2026-01-25 20:14:36 -07:00
c116b8d9fd acl problem persists I think 2026-01-25 20:11:16 -07:00
ea7ed5a568 fix acls maybe 2026-01-25 19:57:53 -07:00
e075075eda missing a piece of my groups saldy 2026-01-25 19:48:37 -07:00
117d5e7738 add admins group and add jonathan to it 2026-01-25 19:17:41 -07:00
23be25546a add sudo schema 2026-01-25 19:00:00 -07:00
Jonathan DeMasi
6f1b19d29a update acl 2026-01-25 16:19:32 -07:00
c9157bc933 cleanup 2026-01-25 00:41:19 -07:00
cfe3ecf85f add back ssh keys tasks 2026-01-25 00:29:36 -07:00
7928217382 this feels wrong 2026-01-25 00:23:31 -07:00
0c1b1329fc add ssh keys 2026-01-25 00:18:07 -07:00
1fa770d85a add directory components back 2026-01-25 00:08:24 -07:00
6e2486166b add openssh 2026-01-25 00:02:38 -07:00
63da7e8ae4 this should all work 2026-01-24 23:55:47 -07:00
bb6c2032ee Refine what is to run now 2026-01-24 23:42:50 -07:00
cb5f1567e7 stepwise 2026-01-24 23:30:30 -07:00
Jonathan DeMasi
8ba8f707bd remove ldappublickey 2026-01-24 21:28:00 -07:00
Jonathan DeMasi
fe8d8e1c4b remove ssh keys for now 2026-01-24 20:44:11 -07:00
Jonathan DeMasi
5df501ab60 bad habits 2026-01-24 20:28:04 -07:00
Jonathan DeMasi
a9e6e8dcda render ldifs 2026-01-24 20:24:12 -07:00
Jonathan DeMasi
315d3ae184 revert 2026-01-24 20:12:48 -07:00
Jonathan DeMasi
16877d74e7 idk if a comment is required 2026-01-24 20:12:00 -07:00
Jonathan DeMasi
81b0c36005 try without pubkeys 2026-01-24 20:08:51 -07:00
Jonathan DeMasi
5ff64bde3b please 2026-01-24 19:56:56 -07:00
Jonathan DeMasi
0deeeb93a1 old schoolg 2026-01-24 19:46:05 -07:00
Jonathan DeMasi
ecb66510d4 maybe that was right 2026-01-24 19:38:24 -07:00
Jonathan DeMasi
89875d5cf5 not fancy 2026-01-24 19:28:01 -07:00
Jonathan DeMasi
7f134527a0 rework schemas 2026-01-24 19:17:34 -07:00
Jonathan DeMasi
9c397dfbc5 rework schemas 2026-01-24 19:17:20 -07:00
Jonathan DeMasi
37b1da7311 dir not plural 2026-01-24 19:08:08 -07:00
Jonathan DeMasi
af60749822 extraneous slash 2026-01-24 19:04:00 -07:00
Jonathan DeMasi
49f9f35f99 add openssh schema missing 2026-01-24 18:58:38 -07:00
Jonathan DeMasi
b04edc27bc hack 2026-01-24 18:48:57 -07:00
Jonathan DeMasi
e241c21855 back to idempotency issue 2026-01-24 18:40:09 -07:00
Jonathan DeMasi
06824fa4cd skip sudo for now, old format perhaps 2026-01-24 18:35:32 -07:00
Jonathan DeMasi
04cd5c75cb put ordering back 2026-01-24 18:27:55 -07:00
Jonathan DeMasi
1a458118a5 duh things are failing because no schemas 2026-01-24 18:27:28 -07:00
Jonathan DeMasi
4ed9beb47e add missing homedir 2026-01-24 18:22:34 -07:00
Jonathan DeMasi
6ce4d9f5d0 is it because my user doesn't here have exist yet 2026-01-24 18:18:06 -07:00
Jonathan DeMasi
3162c48ed4 i Hate this 2026-01-24 18:11:49 -07:00
Jonathan DeMasi
8d3a379440 explicitly cast int, add top 2026-01-24 18:08:09 -07:00
Jonathan DeMasi
efdafe72bb add group membership check separately 2026-01-24 17:52:16 -07:00
Jonathan DeMasi
12d379197e case 2026-01-24 17:38:25 -07:00
Jonathan DeMasi
a1e6bda361 stupid 2026-01-24 17:25:08 -07:00
Jonathan DeMasi
0ac95d79a9 Helps to use right dn 2026-01-24 17:20:11 -07:00
Jonathan DeMasi
6d38c5102c add missing org name var 2026-01-24 17:14:47 -07:00
Jonathan DeMasi
8a2b513e3b our base dn doesn't exist apparently 2026-01-24 17:09:16 -07:00
Jonathan DeMasi
16b86e616a Missing groups tasks 2026-01-24 17:03:38 -07:00
Jonathan DeMasi
f18df7a8d6 helps to add the new role 2026-01-24 16:58:03 -07:00
Jonathan DeMasi
6e95041033 Init openldap data 2026-01-24 16:53:59 -07:00
Jonathan DeMasi
9cb8287808 Fix missing import 2026-01-24 16:21:38 -07:00
Jonathan DeMasi
71df681079 fix auth 2026-01-24 16:13:31 -07:00
d98889584e add more anon acl 2026-01-24 15:19:42 -07:00
0f48514656 remove faulty manager role for now 2026-01-24 15:10:30 -07:00
e101a1c248 Update acls 2026-01-24 14:58:16 -07:00
43c0374f22 naming consistency 2026-01-24 14:18:05 -07:00
bd024fe395 add hashed ldap manager pw 2026-01-24 14:15:30 -07:00
ec1342f6b3 add manager logic 2026-01-24 14:10:58 -07:00
58 changed files with 937 additions and 69 deletions

View File

@@ -5,4 +5,5 @@
- common - common
- nginx - nginx
- gitea - gitea
- olm
- ldap_client

View File

@@ -1 +1,4 @@
lego_version: "4.30.1" lego_version: "4.30.1"
olm_version: "1.4.0"
olm_checksum: "sha256:e35431991b00a6c62fa32c91497a011bde2af9358efc2cb7f49aae5606409f94"
static_site: false

View File

@@ -0,0 +1 @@
syncthing_version: 2.0.13

View File

@@ -1,2 +1,13 @@
gitea_version: 1.25.3 gitea_version: 1.25.3
root_pw: "{{ lookup('bitwarden.secrets.lookup', '4c3d81e6-bb31-40f9-a37a-b3bd00484160') }}" root_pw: "{{ lookup('bitwarden.secrets.lookup', '4c3d81e6-bb31-40f9-a37a-b3bd00484160') }}"
nginx_ssl_enabled: true
olm_config_path: "/etc/olm"
olm_endpoint: "https://pangolin.jthan.io"
olm_id: "{{ lookup('bitwarden.secrets.lookup', 'a27c5cf3-21f1-464a-b911-b3de017888cb') }}"
olm_secret: "{{ lookup('bitwarden.secrets.lookup', 'a9499a7f-4b3e-4c1b-97a0-b3de01789bfb') }}"
olm_loglevel: "INFO"
olm_override_dns: "false"
olm_tunnel_dns: "true"

View File

@@ -0,0 +1 @@
root_pw: "{{ lookup('bitwarden.secrets.lookup', '613b01c3-7d54-4650-ac5c-b3de008a87c3') }}"

View File

@@ -0,0 +1,6 @@
nginx_ssl_enabled: true
static_site: true
letsencrypt_email: "me@jthan.io"
linode_dns_token: "{{ lookup('bitwarden.secrets.lookup', '8849d676-e53e-4aef-a7e6-b3dc014dd698') }}"

View File

@@ -1,10 +1,17 @@
root_pw: "{{ lookup('bitwarden.secrets.lookup', '64a96d82-179b-41af-898d-b3dc014f44a0') }}" root_pw: "{{ lookup('bitwarden.secrets.lookup', '64a96d82-179b-41af-898d-b3dc014f44a0') }}"
letsencrypt_email: "me@jthan.io" letsencrypt_email: "me@jthan.io"
linode_dns_token: "{{ lookup('bitwarden.secrets.lookup', '8849d676-e53e-4aef-a7e6-b3dc014dd698') }}" linode_dns_token: "{{ lookup('bitwarden.secrets.lookup', '8849d676-e53e-4aef-a7e6-b3dc014dd698') }}"
ldap_domain: ldap.home.jthan.io ldap_domain: ldap.home.jthan.io
ldap_basedn: dc=ldap,dc=home,dc=jthan,dc=io ldap_basedn: dc=ldap,dc=home,dc=jthan,dc=io
ldap_manager_pw_hash: "{{ lookup('bitwarden.secrets.lookup', '32654697-7172-4fe3-9767-b3dc015ddd34') }}"
ldap_uri: ldap://ldap.home.jthan.io
ldap_org_name: "jthan.io"
ldap_admin_dn: cn=Manager,{{ ldap_basedn }}
ldap_admin_pw: "{{ lookup('bitwarden.secrets.lookup', '04e7d5d8-f97a-4fbc-9ecf-b3dc015dfbd2') }}"
ldap_users: ldap_users:
- uid: jonathan - uid: jonathan
@@ -12,6 +19,17 @@ ldap_users:
sn: DeMasi sn: DeMasi
uidNumber: 10001 uidNumber: 10001
gidNumber: 10001 gidNumber: 10001
homeDirectory: /home/jonathan
ssh_keys: ssh_keys:
- ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHQyLuuN1SYM3ise8983bA7tT5VJqAI5CpPV1whEA9VF - "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHQyLuuN1SYM3ise8983bA7tT5VJqAI5CpPV1whEA9VF jonathan@briar"
- "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPDsc/KsBWrYGqQXXFSBB5Pt7IflltekQ9nisD0r6Meu jonathan@nautilus"
ldap_groups:
- name: jonathan
gid: 10001
members:
- jonathan
- name: admins
gid: 10100
members:
- jonathan

View File

@@ -1,2 +1,4 @@
root_pw: "{{ lookup('bitwarden.secrets.lookup', 'df4470ca-f839-4c69-bca2-b3de008b0d71') }}"
prometheus_version: 3.9.1 prometheus_version: 3.9.1
prometheus_sha256: 86a6999dd6aacbd994acde93c77cfa314d4be1c8e7b7c58f444355c77b32c584 prometheus_sha256: 86a6999dd6aacbd994acde93c77cfa314d4be1c8e7b7c58f444355c77b32c584

View File

@@ -0,0 +1,6 @@
nginx_ssl_enabled: true
static_site: true
letsencrypt_email: "me@jthan.io"
linode_dns_token: "{{ lookup('bitwarden.secrets.lookup', '8849d676-e53e-4aef-a7e6-b3dc014dd698') }}"

View File

@@ -1,4 +1,7 @@
root_pw: "{{ lookup('bitwarden.secrets.lookup', '279ef4de-8dc7-4e55-a548-b3c400107332') }}" root_pw: "{{ lookup('bitwarden.secrets.lookup', '279ef4de-8dc7-4e55-a548-b3c400107332') }}"
pangolin_version: "1.15.4"
gerbil_version: "1.3.0"
traefik_version: "3.6.8"
pangolin_base_domain: "pangolin.jthan.io" pangolin_base_domain: "pangolin.jthan.io"
pangolin_cert_email: "me@jthan.io" pangolin_cert_email: "me@jthan.io"
pangolin_secret_string: "{{ lookup('bitwarden.secrets.lookup', '30efc9d3-4f98-4b1b-b31b-b3c40010c343') }}" pangolin_secret_string: "{{ lookup('bitwarden.secrets.lookup', '30efc9d3-4f98-4b1b-b31b-b3c40010c343') }}"

View File

@@ -1,4 +1,18 @@
private_domains: private_domains:
- name: jthan.io
records:
- type: A
name: "jthan.io"
value: "192.168.1.17"
- type: A
name: "notes.jthan.io"
value: "192.168.1.16"
- type: AAAA
name: "jthan.io"
value: "2602:fb57:c20:b00:be24:11ff:feac:6536"
- type: AAAA
name: "notes.jthan.io"
value: "2602:fb57:c20:b00:be24:11ff:fe8b:f6db"
- name: home.jthan.io - name: home.jthan.io
records: records:
- type: A - type: A
@@ -22,6 +36,12 @@ private_domains:
- type: A - type: A
name: "ldap.home.jthan.io" name: "ldap.home.jthan.io"
value: 192.168.1.13 value: 192.168.1.13
- type: A
name: "irc.home.jthan.io"
value: 192.168.1.99
- type: A
name: "syncthing.home.jthan.io"
value: 192.168.1.15
- type: AAAA - type: AAAA
name: "storage0.home.jthan.io" name: "storage0.home.jthan.io"
value: "2602:fb57:c20:b00:7a55:36ff:fe02:92c9" value: "2602:fb57:c20:b00:7a55:36ff:fe02:92c9"
@@ -37,6 +57,12 @@ private_domains:
- type: AAAA - type: AAAA
name: "ldap.home.jthan.io" name: "ldap.home.jthan.io"
value: "2602:fb57:c20:b00:be24:11ff:fe9a:da25" value: "2602:fb57:c20:b00:be24:11ff:fe9a:da25"
- type: AAAA
name: "irc.home.jthan.io"
value: "2602:fb57:c20:b00:be24:11ff:fee7:b4bc"
- type: AAAA
name: "syncthing.home.jthan.io"
value: "2602:fb57:c20:b00:be24:11ff:fee9:9c4b"
- type: CNAME - type: CNAME
name: "ha.home.jthan.io" name: "ha.home.jthan.io"
value: "proxy0.home.jthan.io" value: "proxy0.home.jthan.io"

View File

@@ -15,3 +15,13 @@ monitoring.home.jthan.io ansible_host=192.168.1.12
[ldap_servers] [ldap_servers]
ldap.home.jthan.io ldap.home.jthan.io
[irc]
irc.home.jthan.io
[syncthing]
syncthing.home.jthan.io
[webservers]
notes.jthan.io ansible_host=192.168.1.16
jthan.io ansible_host=192.168.1.17

7
ansible/irc.yaml Normal file
View File

@@ -0,0 +1,7 @@
---
# file: irc.yaml
- hosts: irc
roles:
- common
- ldap_client
- irc

View File

@@ -5,3 +5,4 @@
- common - common
- lego - lego
- openldap_server - openldap_server
- openldap_directory

View File

@@ -1,6 +1,7 @@
--- ---
# file: pangolin.yaml # file: monitoring.yaml
- hosts: monitoring - hosts: monitoring
roles: roles:
- common - common
- ldap_client
- prometheus - prometheus

View File

@@ -0,0 +1,92 @@
- name: Download and verify the blackbox archive
get_url:
url: "https://github.com/blackbox/blackbox/releases/download/v{{ blackbox_version }}/blackbox-{{ blackbox_version }}.linux-amd64.tar.gz"
dest: "/tmp/blackbox-{{ blackbox_version }}.linux-amd64.tar.gz"
checksum: "sha256:{{ blackbox_sha256 }}"
register: download_result
- name: Unarchive blackbox binary
unarchive:
src: "{{ download_result.dest }}"
dest: /tmp
remote_src: true # Indicates the source file is on the remote host
owner: root
group: root
mode: 0755
- name: Copy blackbox binary to /usr/local/bin
copy:
src: "/tmp/blackbox-{{ blackbox_version }}.linux-amd64/blackbox"
dest: "/usr/local/bin/blackbox-{{ blackbox_version }}"
owner: root
group: root
mode: '0755'
remote_src: yes
- name: Create blackbox binary symlink
file:
src: "/usr/local/bin/blackbox-{{ blackbox_version }}"
dest: "/usr/local/bin/blackbox"
state: link
owner: root
group: root
mode: '0755' # Permissions for the target file
force: yes
- name: Create a blackbox group
group:
name: blackbox
state: present
gid: 1052
- name: Create a blackbox user
user:
name: blackbox
uid: 1052
group: 1052
comment: "blackbox user"
shell: /bin/bash
state: present
create_home: no
- name: Create blackbox data and config directories
file:
path: "{{ item }}"
state: directory
mode: 0750
owner: blackbox
group: blackbox
loop:
- /var/lib/blackbox_exporter
- /etc/blackbox_exporter
- name: Install blackbox config
template:
src: templates/blackbox.yml.j2
dest: /etc/blackbox/blackbox.yml
owner: blackbox
group: blackbox
mode: '0640'
notify: Restart blackbox
- name: Install blackbox service
template:
src: templates/blackbox.service.j2
dest: /etc/systemd/system/blackbox.service
owner: root
group: root
mode: 0640
register: blackbox_service
notify: Restart blackbox
- name: systemctl daemon-reload to pickup blackbox service changes
systemd_service:
daemon_reload: true
when: blackbox_service.changed
notify: Restart blackbox
- name: Start and enable blackbox
service:
name: blackbox
state: started
enabled: true

View File

@@ -0,0 +1,21 @@
- name: Add docker-ce repo to dnf
command: dnf config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
args:
creates: /etc/yum.repos.d/docker-ce.repo
- name: Install docker-ce and other related packages
package:
name:
- docker-ce
- docker-ce-cli
- containerd.io
- docker-buildx-plugin
- docker-compose-plugin
state: latest
- name: Start and enable docker
service:
name: docker
state: started
enabled: true

View File

@@ -0,0 +1,11 @@
- name: Install irssi
package:
name: irssi
state: latest
- name: Install tmux
package:
name: tmux
state: latest

View File

@@ -0,0 +1,9 @@
- name: restart sssd
service:
name: sssd
state: restarted
- name: restart sshd
service:
name: sshd
state: restarted

View File

@@ -0,0 +1,9 @@
- name: Check if authselect current is already using sssd
shell: 'authselect current | grep -Pzo "(?s)sssd.*?mkhomedir"'
register: grep_result
ignore_errors: true
- name: Run authselect
command: authselect select sssd with-mkhomedir
when: grep_result.rc != 0

View File

@@ -0,0 +1,17 @@
- name: Install openldap client and other required packages
package:
name:
- openldap-clients
- sssd
- sssd-ldap
- oddjob-mkhomedir
- libsss_sudo
state: present
notify: restart sssd
- name: Start and enable oddjobd
service:
name: oddjobd
state: started
enabled: true

View File

@@ -0,0 +1,5 @@
- import_tasks: install.yaml
- import_tasks: authselect.yaml
- import_tasks: sssd.yaml
- import_tasks: nsswitch.yaml
- import_tasks: sshd.yaml

View File

@@ -0,0 +1,6 @@
- name: Ensure nsswitch is looking to sssd for sudo
lineinfile:
path: /etc/nsswitch.conf
regexp: '^sudoers:'
line: 'sudoers: files sss'
backup: true

View File

@@ -0,0 +1,16 @@
- name: Ensure sshd has AuthorizedKeysCommand
lineinfile:
state: present
path: /etc/ssh/sshd_config
regexp: '^#AuthorizedKeysCommand'
line: 'AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys'
notify: restart sshd
- name: Ensure sshd has AuthorizedKeysCommandUser
lineinfile:
state: present
path: /etc/ssh/sshd_config
regexp: '^#AuthorizedKeysCommandUser'
line: 'AuthorizedKeysCommandUser nobody'
notify: restart sshd

View File

@@ -0,0 +1,15 @@
- name: Create sssd.conf
template:
src: templates/sssd.conf.j2
dest: /etc/sssd/sssd.conf
owner: root
group: sssd
mode: '0640'
notify: restart sssd
- name: Start and enable sssd
service:
name: sssd
state: started
enabled: true

View File

@@ -0,0 +1,41 @@
[sssd]
services = nss, pam, sudo, ssh
domains = ldap
[nss]
#debug_level = 0x3ff0
[pam]
[domain/ldap]
#debug_level = 0x3ff0
id_provider = ldap
auth_provider = ldap
chpass_provider = ldap
sudo_provider = ldap
ldap_schema = rfc2307
ldap_uri = ldap://ldap.home.jthan.io
ldap_search_base = dc=ldap,dc=home,dc=jthan,dc=io
ldap_sudo_search_base = ou=SUDOers,dc=ldap,dc=home,dc=jthan,dc=io
ldap_id_use_start_tls = true
ldap_tls_reqcert = demand
ldap_user_object_class = posixAccount
ldap_group_object_class = posixGroup
ldap_user_uid_number = uidNumber
ldap_user_gid_number = gidNumber
ldap_user_home_directory = homeDirectory
ldap_user_shell = loginShell
ldap_user_ssh_public_key = sshPublicKey
cache_credentials = true
enumerate = false
# Access control (optional)
# ldap_access_filter = (memberOf=cn=linux-users,ou=groups,dc=example,dc=com)
[sudo]
#debug_level = 0x3ff0
[ssh]
#debug_level = 0x3ff0

View File

@@ -12,17 +12,63 @@
mode: '0644' mode: '0644'
notify: Restart nginx notify: Restart nginx
- name: Install openssl
package:
name: openssl
state: latest
when: nginx_ssl_enabled
- name: Create nginx ssl directory - name: Create nginx ssl directory
file: file:
path: /etc/nginx/ssl path: /etc/nginx/ssl
state: directory state: directory
mode: '0755' mode: '0755'
when: nginx_ssl_enabled
- name: Generate dhparams - name: Generate dhparams
command: command:
cmd: openssl dhparam -out /etc/nginx/ssl/dhparam.pem 4096 cmd: openssl dhparam -out /etc/nginx/ssl/dhparam.pem 4096
creates: /etc/nginx/ssl/dhparam.pem creates: /etc/nginx/ssl/dhparam.pem
notify: Restart nginx notify: Restart nginx
when: nginx_ssl_enabled
- name: Copy SSL certificate into place for SSL enabled nginx server
copy:
src: /root/.lego/certificates/{{ inventory_hostname }}.crt
dest: /etc/nginx/ssl/{{ inventory_hostname }}.crt
owner: nginx
group: nginx
mode: 0600
remote_src: true
when: nginx_ssl_enabled
- name: Copy SSL issuer certificate into place for SSL enabled nginx server
copy:
src: /root/.lego/certificates/{{ inventory_hostname }}.issuer.crt
dest: /etc/nginx/ssl/{{ inventory_hostname }}.issuer.crt
owner: nginx
group: nginx
mode: 0600
remote_src: true
when: nginx_ssl_enabled
- name: Copy SSL key into place for SSL enabled nginx server
copy:
src: /root/.lego/certificates/{{ inventory_hostname }}.key
dest: /etc/nginx/ssl/{{ inventory_hostname }}.key
owner: nginx
group: nginx
mode: 0600
remote_src: true
when: nginx_ssl_enabled
- name: Create web root
file:
path: /srv/http/{{ inventory_hostname }}/html
state: directory
owner: nginx
group: nginx
mode: '0755'
- name: Start and enable nginx - name: Start and enable nginx
service: service:
@@ -45,13 +91,24 @@
permanent: true permanent: true
immediate: true immediate: true
offline: true offline: true
when: nginx_ssl_enabled
- name: Create nginx vhosts - name: Create nginx non-ssl vhost
template: template:
src: templates/vhost.conf.j2 src: templates/vhost.conf.j2
dest: /etc/nginx/conf.d/{{ inventory_hostname}}.conf
owner: nginx
group: nginx
mode: '0644'
notify: Restart nginx
when: not nginx_ssl_enabled
- name: Create nginx ssl vhost
template:
src: templates/vhost_ssl.conf.j2
dest: /etc/nginx/conf.d/{{ inventory_hostname }}.conf dest: /etc/nginx/conf.d/{{ inventory_hostname }}.conf
owner: nginx owner: nginx
group: nginx group: nginx
mode: '0644' mode: '0644'
notify: Restart nginx notify: Restart nginx
when: nginx_ssl_enabled

View File

@@ -3,46 +3,9 @@ server {
server_name {{ inventory_hostname }}; server_name {{ inventory_hostname }};
root /srv/http/{{ inventory_hostname }}/html; root /srv/http/{{ inventory_hostname }}/html;
# Allow lego to renew certs here using its own http server, we just proxy
location /.well-known/acme-challenge {
proxy_pass http://127.0.0.1:81;
proxy_set_header Host $host;
}
location / { location / {
return 301 https://{{ inventory_hostname }}$request_uri;
index index.htm index.html; index index.htm index.html;
error_page 404 /404.html;
try_files $uri $uri.html $uri/ =404;
} }
}
server {
listen [::]:443 ssl ipv6only=off default_server;
http2 on;
ssl_certificate /etc/nginx/ssl/{{ inventory_hostname}}.crt;
ssl_certificate_key /etc/nginx/ssl/{{ inventory_hostname}}.key;
ssl_trusted_certificate /etc/nginx/ssl/{{ inventory_hostname}}.issuer.crt;
ssl_session_timeout 1d;
ssl_session_cache shared:SSL:50m;
ssl_session_tickets off;
ssl_protocols TLSv1.3;
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
ssl_dhparam /etc/nginx/ssl/dhparam.pem;
ssl_prefer_server_ciphers on;
add_header Strict-Transport-Security max-age=15768000;
ssl_stapling on;
ssl_stapling_verify on;
server_name {{ inventory_hostname }};
location / {
client_max_body_size 512M;
proxy_pass http://localhost:3000;
proxy_set_header Connection $http_connection;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
} }

View File

@@ -0,0 +1,56 @@
server {
listen [::]:80 ipv6only=off default_server;
server_name {{ inventory_hostname }};
root /srv/http/{{ inventory_hostname }}/html;
# Allow lego to renew certs here using its own http server, we just proxy
location /.well-known/acme-challenge {
proxy_pass http://127.0.0.1:81;
proxy_set_header Host $host;
}
location / {
return 301 https://{{ inventory_hostname }}$request_uri;
index index.htm index.html;
}
}
server {
listen [::]:443 ssl ipv6only=off default_server;
http2 on;
ssl_certificate /etc/nginx/ssl/{{ inventory_hostname}}.crt;
ssl_certificate_key /etc/nginx/ssl/{{ inventory_hostname}}.key;
ssl_trusted_certificate /etc/nginx/ssl/{{ inventory_hostname}}.issuer.crt;
ssl_session_timeout 1d;
ssl_session_cache shared:SSL:50m;
ssl_session_tickets off;
ssl_protocols TLSv1.3;
ssl_ciphers 'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:kEDH+AESGCM:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES256-SHA:DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA:DHE-RSA-AES256-SHA:AES128-GCM-SHA256:AES256-GCM-SHA384:AES128-SHA256:AES256-SHA256:AES128-SHA:AES256-SHA:AES:CAMELLIA:DES-CBC3-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA';
ssl_dhparam /etc/nginx/ssl/dhparam.pem;
ssl_prefer_server_ciphers on;
add_header Strict-Transport-Security max-age=15768000;
ssl_stapling on;
ssl_stapling_verify on;
server_name {{ inventory_hostname }};
{% if not static_site %}
location / {
client_max_body_size 512M;
proxy_pass http://localhost:3000;
proxy_set_header Connection $http_connection;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
{% endif %}
{% if static_site %}
root /srv/http/{{ inventory_hostname }}/html;
index index.html;
error_page 404 /404.html;
location / {
try_files $uri $uri.html $uri/ =404;
}
{% endif %}
}

View File

@@ -0,0 +1,4 @@
- name: restart olm
service:
name: olm
state: restarted

View File

@@ -0,0 +1,56 @@
- name: Download and verify the olm binary
get_url:
url: "https://github.com/fosrl/olm/releases/download/{{ olm_version }}/olm_linux_amd64"
dest: "/usr/local/bin/olm_linux_amd64-{{ olm_version }}"
checksum: "{{ olm_checksum }}"
notify: restart olm
- name: Create olm binary symlink
file:
src: "/usr/local/bin/olm_linux_amd64-{{ olm_version }}"
dest: "/usr/local/bin/olm"
state: link
owner: root
group: root
mode: '0755' # Permissions for the target file
force: yes
notify: restart olm
- name: Create olm config directory
file:
path: "{{ olm_config_path }}"
state: directory
mode: '0700'
owner: root
group: root
- name: Create olm config file
template:
src: templates/config.json.j2
dest: "{{ olm_config_path}}/config.json"
owner: root
group: root
mode: '0600'
- name: Create olm systemd service
template:
src: templates/olm.service.j2
dest: /etc/systemd/system/olm.service
owner: root
group: root
mode: '0600'
register: olm_service
notify: restart olm
- name: systemctl daemon-reload to pickup olm service changes
systemd_service:
daemon_reload: true
when: olm_service.changed
notify: restart olm
- name: Start and enable olm service
service:
name: olm
state: started
enabled: true
daemon_reload: true

View File

@@ -0,0 +1,25 @@
{
"endpoint": "{{ olm_endpoint }}",
"id": "{{ olm_id }}",
"secret": "{{ olm_secret }}",
"org": "",
"userToken": "",
"mtu": 1280,
"dns": "8.8.8.8",
"upstreamDNS": [
"192.168.1.2"
],
"interface": "olm",
"logLevel": "{{ olm_loglevel }}",
"enableApi": false,
"httpAddr": "",
"socketPath": "/var/run/olm.sock",
"pingInterval": "3s",
"pingTimeout": "5s",
"disableHolepunch": false,
"tlsClientCert": "",
"overrideDNS": {{ olm_override_dns }},
"tunnelDNS": {{ olm_tunnel_dns }},
"disableRelay": false,
"Version": "1.4.0"
}

View File

@@ -0,0 +1,12 @@
[Unit]
Description=Olm
After=network.target
[Service]
ExecStart=/usr/local/bin/olm
Restart=always
User=root
Environment="CONFIG_FILE={{ olm_config_path }}/config.json"
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,10 @@
ldap_uri: ldap://ldap.example.com
ldap_basedn: dc=example,dc=com
ldap_admin_dn: cn=Manager,{{ ldap_basedn }}
ldap_admin_pw: "{{ ldap_admin_password }}"
ldap_people_ou: ou=People,{{ ldap_basedn }}
ldap_groups_ou: ou=Groups,{{ ldap_basedn }}
ldap_sudo_ou: ou=SUDOers,{{ ldap_basedn }}

View File

@@ -0,0 +1,31 @@
- name: Ensure base DN exists
community.general.ldap_entry:
dn: "{{ ldap_basedn }}"
state: present
objectClass:
- top
- dcObject
- organization
attributes:
dc: "{{ ldap_basedn.split(',')[0].split('=')[1] }}"
o: "{{ ldap_org_name }}"
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes
- name: Create base OUs
community.general.ldap_entry:
dn: "{{ item }}"
state: present
objectClass: organizationalUnit
loop:
- "{{ ldap_people_ou }}"
- "{{ ldap_groups_ou }}"
- "{{ ldap_sudo_ou }}"
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes

View File

@@ -0,0 +1,47 @@
#- name: Ensure LDAP groups exist
# community.general.ldap_entry:
# dn: "cn={{ item.name }},ou=groups,{{ ldap_basedn }}"
# state: present
# objectClass:
# - posixGroup
# attributes:
# cn: "{{ item.name }}"
# gidNumber: "{{ item.gid }}"
# loop: "{{ ldap_groups }}"
# args:
# server_uri: "{{ ldap_uri }}"
# bind_dn: "{{ ldap_admin_dn }}"
# bind_pw: "{{ ldap_admin_pw }}"
# start_tls: yes
#
- name: Ensure LDAP groups exist
community.general.ldap_entry:
dn: "cn={{ item.name }},ou=Groups,{{ ldap_basedn }}"
state: present
objectClass:
- top
- posixGroup
attributes:
cn: "{{ item.name }}"
gidNumber: "{{ item.gid | int }}"
loop: "{{ ldap_groups }}"
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes
- name: Ensure group memberships are correct
community.general.ldap_attrs:
dn: "cn={{ item.name }},ou=Groups,{{ ldap_basedn }}"
attributes:
memberUid: "{{ item.members }}"
state: exact
loop: "{{ ldap_groups }}"
when: item.members is defined and item.members | length > 0
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes

View File

@@ -0,0 +1,5 @@
- import_tasks: base.yaml
- import_tasks: groups.yaml
- import_tasks: users.yaml
- import_tasks: ssh_keys.yaml
- import_tasks: sudo.yaml

View File

@@ -0,0 +1,13 @@
- name: Set SSH keys
community.general.ldap_attrs:
dn: "uid={{ item.uid }},{{ ldap_people_ou }}"
state: exact
attributes:
sshPublicKey: "{{ item.ssh_keys }}"
loop: "{{ ldap_users }}"
when: item.ssh_keys is defined
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes

View File

@@ -0,0 +1,32 @@
- name: Ensure sudo defaults entry exists
community.general.ldap_entry:
dn: "cn=defaults,{{ ldap_sudo_ou }}"
objectClass:
- top
- sudoRole
attributes:
cn: defaults
sudoOption:
- env_reset
state: present
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes
- name: Admin sudo rule
community.general.ldap_entry:
dn: "cn=admins-all,{{ ldap_sudo_ou }}"
state: present
objectClass: sudoRole
attributes:
cn: admins-all
sudoUser: "%admins"
sudoHost: ALL
sudoCommand: ALL
args:
server_uri: "{{ ldap_uri }}"
bind_dn: "{{ ldap_admin_dn }}"
bind_pw: "{{ ldap_admin_pw }}"
start_tls: yes

View File

@@ -0,0 +1,20 @@
dn: uid={{ user.uid }},ou=People,{{ ldap_basedn }}
objectClass: top
objectClass: inetOrgPerson
objectClass: posixAccount
objectClass: shadowAccount
objectClass: ldapPublicKey
uid: {{ user.uid }}
cn: {{ user.cn }}
sn: {{ user.sn }}
uidNumber: {{ user.uidNumber }}
gidNumber: {{ user.gidNumber }}
homeDirectory: /home/{{ user.uid }}
loginShell: {{ user.shell | default('/bin/bash') }}
{% if user.ssh_keys is defined %}
{% for key in user.ssh_keys %}
sshPublicKey: {{ key }}
{% endfor %}
{% endif %}

View File

@@ -0,0 +1,32 @@
---
# Render one LDIF per user and ensure every inventory user exists in the
# directory.
- name: Render user LDIFs
  # NOTE(review): nothing in this file consumes the rendered LDIFs -- they
  # appear to be a manual-ldapadd/debugging aid; confirm before removing.
  template:
    src: user.ldif.j2
    dest: "/tmp/ldap-user-{{ user.uid }}.ldif"
    mode: '0600'
  loop: "{{ ldap_users }}"
  loop_control:
    loop_var: user

- name: Ensure users exist
  community.general.ldap_entry:
    dn: "uid={{ item.uid }},{{ ldap_people_ou }}"
    # state: present -> attributes are only applied when the entry is first
    # created; existing entries are not modified by this task.
    state: present
    objectClass:
      - inetOrgPerson
      - posixAccount
      - ldapPublicKey
    attributes:
      cn: "{{ item.cn }}"
      sn: "{{ item.sn }}"
      uid: "{{ item.uid }}"
      uidNumber: "{{ item.uidNumber }}"
      gidNumber: "{{ item.gidNumber }}"
      homeDirectory: "/home/{{ item.uid }}"
      # Honour per-user shells, matching user.ldif.j2.
      loginShell: "{{ item.shell | default('/bin/bash') }}"
      # default([]) keeps templating from failing for users without keys
      # (ssh_keys.yaml guards on is-defined; this task previously did not).
      # NOTE(review): ldapPublicKey declares sshPublicKey as MUST, so the
      # server may still reject creation of a key-less user -- confirm.
      sshPublicKey: "{{ item.ssh_keys | default([]) }}"
  loop: "{{ ldap_users }}"
  args:
    server_uri: "{{ ldap_uri }}"
    bind_dn: "{{ ldap_admin_dn }}"
    bind_pw: "{{ ldap_admin_pw }}"
    start_tls: true

View File

@@ -6,3 +6,6 @@ ldap_bind_dn: cn=binduser,{{ ldap_basedn }}
ldap_cert_path: /etc/openldap/certs/ldap.crt ldap_cert_path: /etc/openldap/certs/ldap.crt
ldap_key_path: /etc/openldap/certs/ldap.key ldap_key_path: /etc/openldap/certs/ldap.key
ldap_directory_manager_dn: "cn=Manager,{{ ldap_basedn }}"
ldap_directory_manager_pw_hash: "{{ ldap_manager_pw_hash }}"

View File

@@ -1,11 +1,11 @@
--- ---
- name: restart slapd - name: restart slapd
ansible.builtin.service: service:
name: slapd name: slapd
state: restarted state: restarted
- name: reload slapd - name: reload slapd
ansible.builtin.service: service:
name: slapd name: slapd
state: reloaded state: reloaded

View File

@@ -9,9 +9,16 @@
by dn="{{ ldap_admin_dn }}" write by dn="{{ ldap_admin_dn }}" write
by dn="{{ ldap_bind_dn }}" read by dn="{{ ldap_bind_dn }}" read
by self write by self write
by anonymous auth
by * none
- >-
to attrs=uid
by * read
by anonymous read
by * none by * none
- >- - >-
to * to *
by * read
by dn="{{ ldap_admin_dn }}" write by dn="{{ ldap_admin_dn }}" write
by dn="{{ ldap_bind_dn }}" read by dn="{{ ldap_bind_dn }}" read
by self read by self read

View File

@@ -0,0 +1,8 @@
# OpenSSH LDAP public key (LPK) schema in cn=config (olc*) form:
# defines the sshPublicKey attribute and the ldapPublicKey auxiliary
# objectclass.  Loaded once by tasks/schemas.yaml via ldapadd.
# NOTE(review): LDIF folds long lines onto continuation lines that MUST
# begin with a single space -- verify the wrapped olcAttributeTypes /
# olcObjectClasses lines below kept their leading space.
dn: cn=openssh-lpk-openldap,cn=schema,cn=config
objectClass: olcSchemaConfig
olcAttributeTypes: {0}( 1.3.6.1.4.1.24552.500.1.1.1.13 NAME 'sshPublicKey' D
ESC 'MANDATORY: OpenSSH Public key' EQUALITY octetStringMatch SYNTAX 1.3.6.
1.4.1.1466.115.121.1.40 )
olcObjectClasses: {0}( 1.3.6.1.4.1.24552.500.1.1.2.0 NAME 'ldapPublicKey' DE
SC 'MANDATORY: OpenSSH LPK objectclass' SUP top AUXILIARY MUST ( sshPublicK
ey $ uid ) )

View File

@@ -0,0 +1,11 @@
# Sudoers LDAP schema in cn=config (olc*) form -- presumably converted from
# sudo's shipped schema.OpenLDAP (the 1.3.6.1.4.1.15953 OID arc); loaded
# once by tasks/schemas.yaml via ldapadd.
dn: cn=sudo,cn=schema,cn=config
objectClass: olcSchemaConfig
cn: sudo
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.1 NAME 'sudoUser' DESC 'User(s) who may run sudo' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.2 NAME 'sudoHost' DESC 'Host(s) who may run sudo' EQUALITY caseExactIA5Match SUBSTR caseExactIA5SubstringsMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.3 NAME 'sudoCommand' DESC 'Command(s) to be executed by sudo' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.4 NAME 'sudoRunAs' DESC 'User(s) impersonated by sudo (deprecated)' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.5 NAME 'sudoOption' DESC 'Options(s) followed by sudo' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.6 NAME 'sudoRunAsUser' DESC 'User(s) impersonated by sudo' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcAttributeTypes: ( 1.3.6.1.4.1.15953.9.1.7 NAME 'sudoRunAsGroup' DESC 'Group(s) impersonated by sudo' EQUALITY caseExactIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 )
olcObjectClasses: ( 1.3.6.1.4.1.15953.9.2.1 NAME 'sudoRole' SUP top STRUCTURAL DESC 'Sudoer Entries' MUST ( cn ) MAY ( sudoUser $ sudoHost $ sudoCommand $ sudoRunAs $ sudoRunAsUser $ sudoRunAsGroup $ sudoOption $ description ) )

View File

@@ -0,0 +1,10 @@
---
# Discover the cn=config DN of the database serving {{ ldap_basedn }}
# (e.g. olcDatabase={2}mdb,cn=config) so later tasks (manager.yaml) can
# target it.
- name: Find main LDAP database DN
  # Folded scalar (>-) already joins the lines with a space; a trailing
  # backslash here would be passed to ldapsearch as a literal argument
  # because the command module does not invoke a shell.
  command: >-
    ldapsearch -Y EXTERNAL -H ldapi:///
    -b cn=config '(olcSuffix={{ ldap_basedn }})' dn
  register: ldap_db_dn
  # Read-only query: never report "changed".
  changed_when: false

- name: Set fact for main database DN
  set_fact:
    # Take the first "dn: ..." line of the search output and strip the
    # attribute name, leaving just the DN.
    ldap_main_db_dn: "{{ ldap_db_dn.stdout_lines | select('match','^dn:') | first | regex_replace('^dn: ','') }}"

View File

@@ -27,7 +27,7 @@
- name: Permanently enable ldaps service firewalld - name: Permanently enable ldaps service firewalld
ansible.posix.firewalld: ansible.posix.firewalld:
service: ldaps service: ldaps
state: enabled state: disabled
permanent: true permanent: true
immediate: true immediate: true
offline: true offline: true

View File

@@ -1,6 +1,8 @@
- import_tasks: install.yaml - import_tasks: install.yaml
- import_tasks: disable_ldaps.yaml - import_tasks: disable_ldaps.yaml
#- import_tasks: schemas.yaml - import_tasks: schemas.yaml
- import_tasks: config.yaml - import_tasks: config.yaml
- import_tasks: find_database.yaml
- import_tasks: manager.yaml
- import_tasks: tls.yaml - import_tasks: tls.yaml
- import_tasks: acls.yaml - import_tasks: acls.yaml

View File

@@ -0,0 +1,10 @@
# Point the discovered database at the directory Manager account.
# Depends on ldap_main_db_dn being set by find_database.yaml (see the
# import order in tasks/main.yaml).
- name: Set directory Manager DN and password
  community.general.ldap_attrs:
    dn: "{{ ldap_main_db_dn }}"
    attributes:
      olcRootDN: "{{ ldap_directory_manager_dn }}"
      # Variable name says this is a pre-hashed value ({SSHA}...), not the
      # clear-text password.  NOTE(review): confirm at the var definition.
      olcRootPW: "{{ ldap_directory_manager_pw_hash }}"
    # exact: enforce exactly these values, replacing any existing ones.
    state: exact
  args:
    # Local ldapi socket with SASL EXTERNAL: local root is authorized to
    # modify cn=config without a bind password.
    server_uri: ldapi:///
    sasl_class: external

View File

@@ -1,31 +1,66 @@
- name: List existing schemas ---
# roles/ldap_server/tasks/schemas.yml
- name: Ensure LDAP core schema is loaded
command: > command: >
ldapsearch -Y EXTERNAL -H ldapi:/// -b cn=schema,cn=config dn ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/core.ldif
register: ldap_schemas args:
changed_when: false creates: /etc/openldap/schema/.core_loaded
- name: Load cosine schema - name: Ensure LDAP cosine schema is loaded
command: > command: >
ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/cosine.ldif ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/cosine.ldif
when: "'cn=cosine,cn=schema,cn=config' not in ldap_schemas.stdout" args:
creates: /etc/openldap/schema/.cosine_loaded
- name: Load inetorgperson schema - name: Ensure LDAP inetorgperson schema is loaded
command: > command: >
ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/inetorgperson.ldif ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/inetorgperson.ldif
when: "'cn=inetorgperson,cn=schema,cn=config' not in ldap_schemas.stdout" args:
creates: /etc/openldap/schema/.inetorgperson_loaded
- name: Load nis schema - name: Ensure LDAP nis schema is loaded
command: > command: >
ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/nis.ldif ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/nis.ldif
when: "'cn=nis,cn=schema,cn=config' not in ldap_schemas.stdout" args:
creates: /etc/openldap/schema/.nis_loaded
- name: Load sudo schema - name: Copy sudo schema into place
copy:
src: files/sudo.ldif
dest: /etc/openldap/schema/sudo.ldif
owner: ldap
group: ldap
mode: '0600'
#
- name: Copy openssh schema into place
copy:
src: files/openssh.ldif
dest: /etc/openldap/schema/openssh.ldif
owner: ldap
group: ldap
mode: '0600'
- name: Ensure custom LDAP schemas (sudo + openssh) are loaded
command: > command: >
ldapadd -Y EXTERNAL -H ldapi:/// -f /usr/share/doc/sudo/schema.OpenLDAP ldapadd -Y EXTERNAL -H ldapi:/// -f {{ item.file }}
when: "'cn=sudo,cn=schema,cn=config' not in ldap_schemas.stdout" args:
creates: "/etc/openldap/schema/.{{ item.name }}_loaded"
loop:
- { name: "sudo", file: "/etc/openldap/schema/sudo.ldif" }
- { name: "openssh", file: "/etc/openldap/schema/openssh.ldif" }
loop_control:
label: "{{ item.name }}"
- name: Load OpenSSH public key schema # Touch marker files for idempotency (optional but recommended)
command: > - name: Ensure marker files exist
ldapadd -Y EXTERNAL -H ldapi:/// -f /etc/openldap/schema/openssh-lpk.ldif file:
when: "'cn=openssh-lpk,cn=schema,cn=config' not in ldap_schemas.stdout" path: "/etc/openldap/schema/.{{ item.name }}_loaded"
state: touch
loop:
- { name: "core" }
- { name: "cosine" }
- { name: "inetorgperson" }
- { name: "nis" }
- { name: "sudo" }
- { name: "openssh" }

View File

@@ -1,6 +1,6 @@
services: services:
pangolin: pangolin:
image: fosrl/pangolin:latest # https://github.com/fosrl/pangolin/releases image: fosrl/pangolin:{{ pangolin_version }} # https://github.com/fosrl/pangolin/releases
container_name: pangolin container_name: pangolin
restart: unless-stopped restart: unless-stopped
volumes: volumes:
@@ -12,7 +12,7 @@ services:
retries: 15 retries: 15
gerbil: gerbil:
image: fosrl/gerbil:latest # https://github.com/fosrl/gerbil/releases image: fosrl/gerbil:{{ gerbil_version }} # https://github.com/fosrl/gerbil/releases
container_name: gerbil container_name: gerbil
restart: unless-stopped restart: unless-stopped
depends_on: depends_on:
@@ -38,7 +38,7 @@ services:
- 80:80 # Port for traefik because of the network_mode - 80:80 # Port for traefik because of the network_mode
traefik: traefik:
image: traefik:v3.4.0 image: traefik:v{{ traefik_version }}
container_name: traefik container_name: traefik
restart: unless-stopped restart: unless-stopped
network_mode: service:gerbil # Ports appear on the gerbil service network_mode: service:gerbil # Ports appear on the gerbil service

View File

@@ -0,0 +1,47 @@
---
# Deploy Syncthing as a docker compose project under /root/syncthing and
# open its firewall ports.
- name: Create docker-compose project directory
  file:
    path: /root/syncthing
    state: directory
    # Quoted: a bare leading-zero number is parsed as octal by YAML 1.1,
    # which linters flag; the quoted string is unambiguous.
    mode: '0750'
    owner: root
    group: root

- name: Create docker-compose
  template:
    src: templates/docker-compose.yaml.j2
    dest: /root/syncthing/docker-compose.yaml
    owner: root
    group: root
    mode: '0600'

- name: Run docker-compose up
  # NOTE(review): always reports "changed"; add a changed_when based on
  # compose output if accurate reporting is wanted.
  command: docker compose up -d --build
  args:
    chdir: /root/syncthing

# 8384 = web GUI, 22000 tcp+udp = sync protocol.  Exposed everywhere for
# now -- lock down to home subnets later (see commit history).
- name: Permit Syncthing traffic in default zone
  ansible.posix.firewalld:
    port: "{{ item }}"
    permanent: true
    state: enabled
    immediate: true
    offline: true
  loop:
    - 8384/tcp
    - 22000/tcp
    - 22000/udp

View File

@@ -0,0 +1,20 @@
---
# Rendered by the syncthing role to /root/syncthing/docker-compose.yaml.
# NOTE(review): the top-level `version` key is obsolete in the Compose
# specification and ignored by docker compose v2; kept only for older tools.
version: "3"
services:
  syncthing:
    image: syncthing/syncthing:{{ syncthing_version }}
    container_name: syncthing
    # Quoted: a value beginning with "{{" is otherwise read as a YAML flow
    # mapping by YAML-aware tooling inspecting the template.
    hostname: "{{ inventory_hostname }}"
    environment:
      - PUID=1000
      - PGID=1000
      # GUI listens on all interfaces (container uses host networking).
      - STGUIADDRESS=0.0.0.0:8384
    volumes:
      - /var/lib/syncthing:/var/syncthing
    network_mode: host
    restart: unless-stopped
    healthcheck:
      test: curl -fkLsS -m 2 127.0.0.1:8384/rest/noauth/health | grep -o --color=never OK || exit 1
      interval: 1m
      timeout: 10s
      retries: 3

View File

@@ -6,4 +6,8 @@
- import_playbook: pangolin.yaml - import_playbook: pangolin.yaml
- import_playbook: monitoring.yaml - import_playbook: monitoring.yaml
- import_playbook: ldap_server.yaml - import_playbook: ldap_server.yaml
- import_playbook: irc.yaml
- import_playbook: syncthing.yaml
#- import_playbook: notes.yaml
#- import_playbook: authentik.yaml #- import_playbook: authentik.yaml

8
ansible/syncthing.yaml Normal file
View File

@@ -0,0 +1,8 @@
---
# file: syncthing.yaml
# Provision the syncthing host group: baseline config, LDAP auth, the
# Docker engine, then the Syncthing container (roles run in listed order).
- hosts: syncthing
  roles:
    - common
    - ldap_client
    - docker
    - syncthing

View File

@@ -3,5 +3,6 @@
- hosts: webservers - hosts: webservers
roles: roles:
- common - common
- nginx - ldap_client
- lego - lego
- nginx