ConfigLMM 0.4.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +34 -0
- data/CNAME +1 -0
- data/Examples/.lmm.state.yaml +159 -0
- data/Examples/ConfigLMM.mm.yaml +32 -0
- data/Examples/Implemented.mm.yaml +252 -4
- data/Examples/SmallBusiness.mm.yaml +492 -0
- data/Plugins/Apps/Answer/answer.lmm.rb +165 -0
- data/Plugins/Apps/Answer/answer@.service +40 -0
- data/Plugins/Apps/ArchiSteamFarm/ArchiSteamFarm.conf.erb +0 -3
- data/Plugins/Apps/ArchiSteamFarm/ArchiSteamFarm.lmm.rb +0 -1
- data/Plugins/Apps/Authentik/Authentik-ProxyOutpost.container +7 -1
- data/Plugins/Apps/Authentik/Authentik-Server.container +6 -1
- data/Plugins/Apps/Authentik/Authentik-Worker.container +6 -1
- data/Plugins/Apps/Authentik/Authentik.conf.erb +12 -7
- data/Plugins/Apps/Authentik/Authentik.lmm.rb +226 -61
- data/Plugins/Apps/BookStack/BookStack.conf.erb +0 -3
- data/Plugins/Apps/BookStack/BookStack.container +5 -0
- data/Plugins/Apps/BookStack/BookStack.lmm.rb +14 -3
- data/Plugins/Apps/Cassandra/Cassandra.lmm.rb +9 -19
- data/Plugins/Apps/ClickHouse/ClickHouse.container +28 -0
- data/Plugins/Apps/ClickHouse/ClickHouse.lmm.rb +113 -0
- data/Plugins/Apps/ClickHouse/Config/listen.yaml +2 -0
- data/Plugins/Apps/ClickHouse/Config/logger.yaml +8 -0
- data/Plugins/Apps/ClickHouse/Config/zookeepers.yaml +5 -0
- data/Plugins/Apps/ClickHouse/Connection.rb +96 -0
- data/Plugins/Apps/Discourse/Discourse-Sidekiq.container +5 -0
- data/Plugins/Apps/Discourse/Discourse.conf.erb +1 -4
- data/Plugins/Apps/Discourse/Discourse.container +4 -0
- data/Plugins/Apps/Discourse/Discourse.lmm.rb +116 -55
- data/Plugins/Apps/Dovecot/Dovecot.lmm.rb +74 -62
- data/Plugins/Apps/ERPNext/ERPNext-Frontend.container +6 -1
- data/Plugins/Apps/ERPNext/ERPNext-Queue.container +5 -0
- data/Plugins/Apps/ERPNext/ERPNext-Scheduler.container +5 -0
- data/Plugins/Apps/ERPNext/ERPNext-Websocket.container +6 -1
- data/Plugins/Apps/ERPNext/ERPNext.container +6 -1
- data/Plugins/Apps/ERPNext/ERPNext.lmm.rb +138 -127
- data/Plugins/Apps/GitLab/GitLab.container +6 -0
- data/Plugins/Apps/GitLab/GitLab.lmm.rb +43 -49
- data/Plugins/Apps/Homepage/Homepage.conf.erb +86 -0
- data/Plugins/Apps/Homepage/Homepage.container +19 -0
- data/Plugins/Apps/Homepage/Homepage.lmm.rb +54 -0
- data/Plugins/Apps/IPFS/IPFS.conf.erb +0 -3
- data/Plugins/Apps/IPFS/IPFS.lmm.rb +0 -1
- data/Plugins/Apps/InfluxDB/InfluxDB.conf.erb +0 -3
- data/Plugins/Apps/InfluxDB/InfluxDB.lmm.rb +0 -1
- data/Plugins/Apps/Jackett/Jackett.conf.erb +0 -3
- data/Plugins/Apps/Jackett/Jackett.lmm.rb +0 -1
- data/Plugins/Apps/Jellyfin/Jellyfin.conf.erb +0 -3
- data/Plugins/Apps/Jellyfin/Jellyfin.lmm.rb +0 -1
- data/Plugins/Apps/LetsEncrypt/LetsEncrypt.lmm.rb +49 -28
- data/Plugins/Apps/LibreTranslate/LibreTranslate.container +21 -0
- data/Plugins/Apps/LibreTranslate/LibreTranslate.lmm.rb +34 -0
- data/Plugins/Apps/Lobsters/Containerfile +81 -0
- data/Plugins/Apps/Lobsters/Lobsters-Tasks.container +26 -0
- data/Plugins/Apps/Lobsters/Lobsters.conf.erb +99 -0
- data/Plugins/Apps/Lobsters/Lobsters.container +27 -0
- data/Plugins/Apps/Lobsters/Lobsters.lmm.rb +196 -0
- data/Plugins/Apps/Lobsters/crontab +3 -0
- data/Plugins/Apps/Lobsters/database.yml +26 -0
- data/Plugins/Apps/Lobsters/entrypoint.sh +30 -0
- data/Plugins/Apps/Lobsters/generateCredentials.rb +19 -0
- data/Plugins/Apps/Lobsters/lobsters-cron.sh +25 -0
- data/Plugins/Apps/Lobsters/lobsters-daily.sh +23 -0
- data/Plugins/Apps/Lobsters/puma.rb +49 -0
- data/Plugins/Apps/MariaDB/Connection.rb +55 -0
- data/Plugins/Apps/MariaDB/MariaDB.lmm.rb +60 -53
- data/Plugins/Apps/Mastodon/Mastodon-Sidekiq.container +22 -0
- data/Plugins/Apps/Mastodon/Mastodon-Streaming.container +20 -0
- data/Plugins/Apps/Mastodon/Mastodon.conf.erb +34 -45
- data/Plugins/Apps/Mastodon/Mastodon.container +28 -0
- data/Plugins/Apps/Mastodon/Mastodon.lmm.rb +240 -5
- data/Plugins/Apps/Mastodon/configlmm.rake +30 -0
- data/Plugins/Apps/Mastodon/entrypoint.sh +16 -0
- data/Plugins/Apps/Matrix/Element.container +5 -0
- data/Plugins/Apps/Matrix/Matrix.conf.erb +2 -8
- data/Plugins/Apps/Matrix/Matrix.lmm.rb +100 -71
- data/Plugins/Apps/Matrix/Synapse.container +5 -0
- data/Plugins/Apps/Netdata/Netdata.conf.erb +0 -3
- data/Plugins/Apps/Netdata/Netdata.lmm.rb +0 -1
- data/Plugins/Apps/Nextcloud/Nextcloud.conf.erb +3 -4
- data/Plugins/Apps/Nextcloud/Nextcloud.lmm.rb +150 -68
- data/Plugins/Apps/Nextcloud/autoconfig.php +13 -0
- data/Plugins/Apps/Nextcloud/config.php +10 -1
- data/Plugins/Apps/Nextcloud/nextcloudcron.service +8 -0
- data/Plugins/Apps/Nextcloud/nextcloudcron.timer +10 -0
- data/Plugins/Apps/Nginx/Connection.rb +93 -0
- data/Plugins/Apps/Nginx/conf.d/configlmm.conf +50 -9
- data/Plugins/Apps/Nginx/conf.d/languages.conf +21 -0
- data/Plugins/Apps/Nginx/config-lmm/errors.conf +25 -20
- data/Plugins/Apps/Nginx/config-lmm/gateway-errors.conf +20 -0
- data/Plugins/Apps/Nginx/config-lmm/proxy.conf +1 -1
- data/Plugins/Apps/Nginx/main.conf.erb +7 -3
- data/Plugins/Apps/Nginx/nginx.conf +2 -2
- data/Plugins/Apps/Nginx/nginx.lmm.rb +99 -81
- data/Plugins/Apps/Nginx/proxy.conf.erb +11 -3
- data/Plugins/Apps/Odoo/Odoo.conf.erb +0 -3
- data/Plugins/Apps/Odoo/Odoo.container +5 -0
- data/Plugins/Apps/Odoo/Odoo.lmm.rb +4 -5
- data/Plugins/Apps/Ollama/Ollama.container +26 -0
- data/Plugins/Apps/Ollama/Ollama.lmm.rb +73 -0
- data/Plugins/Apps/OpenTelemetry/Config/config.yaml +704 -0
- data/Plugins/Apps/OpenTelemetry/OpenTelemetry.lmm.rb +154 -0
- data/Plugins/Apps/OpenVidu/Ingress.container +5 -0
- data/Plugins/Apps/OpenVidu/OpenVidu.conf.erb +0 -3
- data/Plugins/Apps/OpenVidu/OpenVidu.container +5 -0
- data/Plugins/Apps/OpenVidu/OpenVidu.lmm.rb +7 -3
- data/Plugins/Apps/OpenVidu/OpenViduCall.conf.erb +0 -3
- data/Plugins/Apps/OpenVidu/OpenViduCall.container +5 -0
- data/Plugins/Apps/PHP-FPM/Connection.rb +91 -0
- data/Plugins/Apps/PHP-FPM/PHP-FPM.lmm.rb +31 -4
- data/Plugins/Apps/Peppermint/Peppermint.conf.erb +2 -5
- data/Plugins/Apps/Peppermint/Peppermint.container +5 -0
- data/Plugins/Apps/Peppermint/Peppermint.lmm.rb +29 -33
- data/Plugins/Apps/Perplexica/Perplexica.container +25 -0
- data/Plugins/Apps/Perplexica/Perplexica.lmm.rb +92 -0
- data/Plugins/Apps/Perplexica/config.toml +26 -0
- data/Plugins/Apps/Podman/Connection.rb +24 -0
- data/Plugins/Apps/Podman/Podman.lmm.rb +80 -0
- data/Plugins/Apps/Podman/storage.conf +6 -0
- data/Plugins/Apps/Postfix/Postfix.lmm.rb +242 -164
- data/Plugins/Apps/PostgreSQL/Connection.rb +97 -0
- data/Plugins/Apps/PostgreSQL/PostgreSQL.lmm.rb +184 -148
- data/Plugins/Apps/Pterodactyl/Pterodactyl.conf.erb +0 -3
- data/Plugins/Apps/Pterodactyl/Pterodactyl.lmm.rb +0 -2
- data/Plugins/Apps/Pterodactyl/Wings.conf.erb +0 -3
- data/Plugins/Apps/RVM/RVM.lmm.rb +57 -0
- data/Plugins/Apps/Roundcube/Roundcube.conf.erb +0 -3
- data/Plugins/Apps/Roundcube/Roundcube.lmm.rb +15 -19
- data/Plugins/Apps/SSH/SSH.lmm.rb +9 -15
- data/Plugins/Apps/SearXNG/SearXNG.container +22 -0
- data/Plugins/Apps/SearXNG/SearXNG.lmm.rb +79 -0
- data/Plugins/Apps/SearXNG/limiter.toml +40 -0
- data/Plugins/Apps/SearXNG/settings.yml +2 -0
- data/Plugins/Apps/SigNoz/Config/alerts.yml +11 -0
- data/Plugins/Apps/SigNoz/Config/otel-collector-config.yaml +110 -0
- data/Plugins/Apps/SigNoz/Config/otel-collector-opamp-config.yaml +1 -0
- data/Plugins/Apps/SigNoz/Config/prometheus.yml +18 -0
- data/Plugins/Apps/SigNoz/SigNoz-Collector.container +23 -0
- data/Plugins/Apps/SigNoz/SigNoz-Migrator.container +17 -0
- data/Plugins/Apps/SigNoz/SigNoz.conf.erb +61 -0
- data/Plugins/Apps/SigNoz/SigNoz.container +26 -0
- data/Plugins/Apps/SigNoz/SigNoz.lmm.rb +319 -0
- data/Plugins/Apps/Solr/log4j2.xml +89 -0
- data/Plugins/Apps/Solr/solr.lmm.rb +82 -0
- data/Plugins/Apps/Sunshine/Sunshine.conf.erb +0 -3
- data/Plugins/Apps/Sunshine/Sunshine.lmm.rb +0 -1
- data/Plugins/Apps/Tunnel/tunnel.lmm.rb +33 -37
- data/Plugins/Apps/UVdesk/UVdesk.conf.erb +0 -3
- data/Plugins/Apps/Umami/Umami.container +19 -0
- data/Plugins/Apps/Umami/Umami.lmm.rb +108 -0
- data/Plugins/Apps/Valkey/Valkey.lmm.rb +54 -42
- data/Plugins/Apps/Vaultwarden/Vaultwarden.conf.erb +9 -6
- data/Plugins/Apps/Vaultwarden/Vaultwarden.container +7 -1
- data/Plugins/Apps/Vaultwarden/Vaultwarden.lmm.rb +64 -29
- data/Plugins/Apps/Wiki.js/Wiki.js.conf.erb +1 -4
- data/Plugins/Apps/Wiki.js/Wiki.js.container +5 -0
- data/Plugins/Apps/Wiki.js/Wiki.js.lmm.rb +31 -37
- data/Plugins/Apps/YaCy/YaCy.conf.erb +93 -0
- data/Plugins/Apps/YaCy/YaCy.container +21 -0
- data/Plugins/Apps/YaCy/YaCy.lmm.rb +160 -0
- data/Plugins/Apps/ZooKeeper/ZooKeeper.container +24 -0
- data/Plugins/Apps/ZooKeeper/ZooKeeper.lmm.rb +68 -0
- data/Plugins/Apps/bitmagnet/bitmagnet.conf.erb +0 -3
- data/Plugins/Apps/bitmagnet/bitmagnet.lmm.rb +0 -1
- data/Plugins/Apps/gollum/gollum.conf.erb +2 -4
- data/Plugins/Apps/gollum/gollum.container +6 -0
- data/Plugins/Apps/gollum/gollum.lmm.rb +51 -50
- data/Plugins/Apps/llama.cpp/llama.cpp.container +28 -0
- data/Plugins/Apps/llama.cpp/llama.cpp.lmm.rb +90 -0
- data/Plugins/Apps/vLLM/vLLM.container +32 -0
- data/Plugins/Apps/vLLM/vLLM.lmm.rb +89 -0
- data/Plugins/OS/General/Utils.lmm.rb +26 -0
- data/Plugins/OS/Linux/Connection.rb +472 -0
- data/Plugins/OS/Linux/Debian/preseed.cfg.erb +25 -6
- data/Plugins/OS/Linux/Flavours.yaml +13 -0
- data/Plugins/OS/Linux/Grub/grub.cfg +10 -0
- data/Plugins/OS/Linux/HTTP.rb +32 -0
- data/Plugins/OS/Linux/Linux.lmm.rb +533 -187
- data/Plugins/OS/Linux/Packages.yaml +20 -1
- data/Plugins/OS/Linux/Services.yaml +8 -0
- data/Plugins/OS/Linux/Shell.rb +70 -0
- data/Plugins/OS/Linux/Syslinux/default +8 -0
- data/Plugins/OS/Linux/WireGuard/WireGuard.lmm.rb +83 -59
- data/Plugins/OS/Linux/WireGuard/wg0.conf.erb +3 -0
- data/Plugins/OS/Linux/openSUSE/autoinst.xml.erb +29 -3
- data/Plugins/OS/Linux/systemd/systemd.lmm.rb +13 -11
- data/Plugins/OS/Routers/Aruba/ArubaInstant.lmm.rb +6 -5
- data/Plugins/Platforms/GitHub.lmm.rb +73 -28
- data/Plugins/Platforms/GoDaddy/GoDaddy.lmm.rb +9 -6
- data/Plugins/Platforms/Proxmox/Proxmox.lmm.rb +402 -0
- data/Plugins/Platforms/Proxmox/XTerm.rb +321 -0
- data/Plugins/Platforms/libvirt/libvirt.lmm.rb +38 -13
- data/Plugins/Platforms/porkbun.lmm.rb +12 -2
- data/Plugins/Platforms/porkbun_spec.rb +2 -2
- data/Plugins/Services/DNS/AmberBit.lmm.rb +1 -1
- data/Plugins/Services/DNS/ArubaItDNS.lmm.rb +1 -1
- data/Plugins/Services/DNS/NICLV.lmm.rb +1 -1
- data/Plugins/Services/DNS/PowerDNS.lmm.rb +70 -68
- data/Plugins/Services/DNS/tonic.lmm.rb +22 -12
- data/lib/ConfigLMM/Framework/plugins/dns.rb +4 -3
- data/lib/ConfigLMM/Framework/plugins/linuxApp.rb +145 -184
- data/lib/ConfigLMM/Framework/plugins/nginxApp.rb +34 -17
- data/lib/ConfigLMM/Framework/plugins/plugin.rb +53 -181
- data/lib/ConfigLMM/Framework/plugins/store.rb +4 -4
- data/lib/ConfigLMM/Framework/variables.rb +75 -0
- data/lib/ConfigLMM/Framework.rb +1 -0
- data/lib/ConfigLMM/cli.rb +12 -6
- data/lib/ConfigLMM/commands/configsCommand.rb +37 -6
- data/lib/ConfigLMM/commands/diff.rb +33 -9
- data/lib/ConfigLMM/context.rb +22 -3
- data/lib/ConfigLMM/io/configList.rb +82 -6
- data/lib/ConfigLMM/io/connection.rb +143 -0
- data/lib/ConfigLMM/io/dhcp.rb +330 -0
- data/lib/ConfigLMM/io/http.rb +78 -0
- data/lib/ConfigLMM/io/local.rb +207 -0
- data/lib/ConfigLMM/io/pxe.rb +92 -0
- data/lib/ConfigLMM/io/ssh.rb +156 -0
- data/lib/ConfigLMM/io/tftp.rb +105 -0
- data/lib/ConfigLMM/io.rb +2 -0
- data/lib/ConfigLMM/secrets/envStore.rb +39 -0
- data/lib/ConfigLMM/secrets/fileStore.rb +43 -0
- data/lib/ConfigLMM/state.rb +2 -1
- data/lib/ConfigLMM/version.rb +2 -1
- data/lib/ConfigLMM.rb +1 -0
- data/{Examples → scripts}/configlmmAuth.sh +7 -5
- metadata +205 -8
data/Plugins/Apps/Roundcube/Roundcube.lmm.rb
CHANGED
@@ -56,7 +56,6 @@ module ConfigLMM
             end
 
             target['Database'] ||= {}
-            activeState['Database'] = target['Database']
             if !target['Database']['Type'] || target['Database']['Type'] == 'pgsql'
                 password = SecureRandom.alphanumeric(20)
                 PostgreSQL.createRemoteUserAndDBOverSSH(target['Database'], USER, password, ssh)
@@ -105,37 +104,34 @@ module ConfigLMM
             else
                 # TODO
             end
-            activeState['Status'] = State::STATUS_DEPLOYED
         end
 
         def cleanup(configs, state, context, options)
-            cleanupType(:Roundcube, configs, state, context, options) do |item, id, state, context, options, ssh|
-                cleanupConfig(item, id, state, context, options, ssh)
+            cleanupType(:Roundcube, configs, state, context, options) do |item, id, state, context, options, connection|
+                cleanupConfig(item, id, state, context, options, connection)
             end
         end
 
-        def cleanupConfig(item, id, state, context, options, ssh)
+        def cleanupConfig(item, id, state, context, options, connection)
             if item['Proxy'].nil? || item['Proxy']
-                self.cleanupNginxConfig('Roundcube', id, state, context, options, ssh)
-                self.class.reload(ssh, options[:dry])
+                self.cleanupNginxConfig('Roundcube', id, state, context, options, connection)
+                self.class.reload(connection, options[:dry])
             end
-            distroInfo = Framework::LinuxApp.currentDistroInfo(ssh)
-            rm(PHP_FPM.configDir(distroInfo) + 'roundcube.conf', options[:dry], ssh)
-            Framework::LinuxApp.reloadService(PHP_FPM::PHPFPM_SERVICE, ssh, options[:dry])
-            Framework::LinuxApp.removePackage(PACKAGE_NAME, ssh, options[:dry])
+            distroInfo = Framework::LinuxApp.currentDistroInfo(connection)
+            connection.rm(PHP_FPM.configDir(distroInfo) + 'roundcube.conf', options[:dry])
+            Framework::LinuxApp.reloadService(PHP_FPM::PHPFPM_SERVICE, connection, options[:dry])
+            Framework::LinuxApp.removePackage(PACKAGE_NAME, connection, options[:dry])
             state.item(id)['Status'] = State::STATUS_DELETED unless options[:dry]
             if options[:destroy]
                 item['Database'] ||= {}
                 if !item['Database']['Type'] || item['Database']['Type'] == 'pgsql'
-                    PostgreSQL.dropUserAndDB(item['Database'], USER, ssh, options[:dry])
+                    PostgreSQL.dropUserAndDB(item['Database'], USER, connection, options[:dry])
                 end
-                Framework::LinuxApp.deleteUserAndGroup(USER, ssh, options[:dry])
-                rm('/var/log/roundcubemail', options[:dry], ssh)
-                rm('/var/log/php/roundcube.access.log', options[:dry], ssh)
-                rm('/var/log/php/roundcube.errors.log', options[:dry], ssh)
-                rm('/var/log/php/roundcube.mail.log', options[:dry], ssh)
-                rm('/var/log/nginx/roundcube.access.log', options[:dry], ssh)
-                rm('/var/log/nginx/roundcube.error.log', options[:dry], ssh)
+                Framework::LinuxApp.deleteUserAndGroup(USER, connection, options[:dry])
+                connection.rm('/var/log/roundcubemail', options[:dry])
+                connection.rm('/var/log/php/roundcube.access.log', options[:dry])
+                connection.rm('/var/log/php/roundcube.errors.log', options[:dry])
+                connection.rm('/var/log/php/roundcube.mail.log', options[:dry])
                 state.item(id)['Status'] = State::STATUS_DESTROYED unless options[:dry]
             end
         end
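The Roundcube change above is part of the release-wide move from raw ssh handles to the new connection abstraction (see data/lib/ConfigLMM/io/connection.rb in the file list). For orientation, a hypothetical .mm.yaml entry that exercises these code paths might look like the sketch below; the key names (Location, Proxy, Database/Type) come from the code above, while the id and host are invented:

    # Hypothetical example - not shipped with the gem
    Webmail:
      Type: Roundcube
      Location: ssh://mail.example.org   # deploy/cleanup target for withConnection
      Proxy: true                        # item['Proxy'] - manage the Nginx vhost
      Database:
        Type: pgsql                      # the default branch when Type is unset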
data/Plugins/Apps/SSH/SSH.lmm.rb
CHANGED
@@ -4,24 +4,21 @@ module ConfigLMM
     class SSH < Framework::LinuxApp
 
         CONFIG_FILE = '/etc/ssh/sshd_config'
+        SSHD_SERVICE = :sshd
 
         def actionSSHDeploy(id, target, activeState, context, options)
-
-
-            uri = Addressable::URI.parse(target['Location'])
-            raise Framework::PluginProcessError.new("#{id}: Unknown Protocol: #{uri.scheme}!") if uri.scheme != 'ssh'
-
-            self.class.sshStart(uri) do |ssh|
+            self.withConnection(target['Location'], target) do |connection|
+                Linux.withConnection(connection) do |linuxConnection|
                     if target['Port']
-
+                        linuxConnection.fileReplace(CONFIG_FILE, '^Port ', '#Port ', options)
                     end
                     if target['ListenAddress']
-
+                        linuxConnection.fileReplace(CONFIG_FILE, '^ListenAddress ', '#ListenAddress ', options)
                     end
                     target['Settings'].to_h.each do |name, value|
-
+                        linuxConnection.fileReplace(CONFIG_FILE, "^#{name} ", "##{name} ", options)
                     end
-
+                    linuxConnection.updateFile(CONFIG_FILE, options) do |configLines|
                         if target['Port']
                             configLines << "Port #{target['Port']}\n"
                         end
@@ -36,16 +33,13 @@ module ConfigLMM
                         configLines
                     end
                     if target['Port']
-
+                        linuxConnection.firewallAddPort(target['Port'].to_s + '/tcp', options)
                     end
+                    linuxConnection.reloadService(SSHD_SERVICE, options)
                 end
-            else
-                # TODO
             end
-
 
         end
 
     end
-
 end
 end
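The rewritten actionSSHDeploy drives everything through linuxConnection, so a target only has to describe the desired sshd state. A hypothetical .mm.yaml entry follows; Port, ListenAddress and Settings are the keys read above, while the id and addresses are invented:

    # Hypothetical example - not shipped with the gem
    Bastion:
      Type: SSH
      Location: ssh://root@bastion.example.org
      Port: 2222                       # existing 'Port' lines are commented out, then re-appended
      ListenAddress: 10.0.0.5
      Settings:                        # each key rewrites its line in /etc/ssh/sshd_config
        PasswordAuthentication: 'no'
        MaxAuthTries: 3

Setting Port also triggers firewallAddPort('2222/tcp', ...) and, in any case, the new SSHD_SERVICE is reloaded at the end of the block.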
data/Plugins/Apps/SearXNG/SearXNG.container
ADDED
@@ -0,0 +1,22 @@
+
+[Unit]
+Description=SearXNG container
+After=local-fs.target
+
+[Container]
+ContainerName=SearXNG
+Image=docker.io/searxng/searxng:latest
+EnvironmentFile=/var/lib/searxng/.config/containers/systemd/SearXNG.env
+Network=slirp4netns:allow_host_loopback=true
+PublishPort=127.0.0.1:18800:8080
+UserNS=keep-id:uid=1000,gid=1000
+Volume=/var/lib/searxng/config:/etc/searxng
+LogDriver=journald
+AutoUpdate=registry
+
+[Service]
+TimeoutStartSec=6min
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target default.target
data/Plugins/Apps/SearXNG/SearXNG.lmm.rb
ADDED
@@ -0,0 +1,79 @@
+
+module ConfigLMM
+    module LMM
+        class SearXNG < Framework::Plugin
+
+            USER = 'searxng'
+            HOME_DIR = '/var/lib/searxng'
+            PORT = 18800
+
+            def actionSearXNGDeploy(id, target, activeState, context, options)
+
+                self.withConnection(target['Location'], target) do |connection|
+                    Linux.withConnection(connection) do |linuxConnection|
+                        Podman.ensurePresent(linuxConnection, options)
+                        Podman.createUser(USER, HOME_DIR, 'SearXNG', linuxConnection, options)
+                        linuxConnection.withUserShell(USER) do |shell|
+                            shell.createDirs(options, '~/config')
+                        end
+
+                        if !linuxConnection.filePresent?(HOME_DIR + '/config/limiter.toml', { **options, 'dry': false })
+                            linuxConnection.upload(__dir__ + '/limiter.toml', HOME_DIR + '/config/', options)
+                        end
+
+                        settings = YAML.load_file(__dir__ + '/settings.yml')
+                        if target['Settings']
+                            settings.merge!(target['Settings'])
+                        end
+                        settingsFile = options['output'] + '/settings.yml'
+                        File.write(settingsFile, settings.to_yaml)
+                        linuxConnection.upload(settingsFile, HOME_DIR + '/config/', options)
+
+                        path = Podman.containersPath(HOME_DIR)
+
+                        secret = SecureRandom.alphanumeric(30)
+                        linuxConnection.fileWrite("#{path}/SearXNG.env", "SEARXNG_SECRET=#{secret}", { **options, hide: true })
+
+                        if target['Valkey']
+                            host = Podman.updateHost(target['Valkey']['Host'])
+                            valkeyPassword = nil
+                            if target['Valkey']['SecretId']
+                                valkeyPassword = context.secrets.load(target['Valkey']['SecretId'], 'VALKEY_PASSWORD')
+                            end
+                            redisURL = Valkey.connectionURL({ host: host, password: valkeyPassword })
+                            linuxConnection.fileAppend("#{path}/SearXNG.env", "SEARXNG_REDIS_URL=#{redisURL}", { **options, hide: true })
+                        end
+
+                        linuxConnection.setUserGroup("#{path}/SearXNG.env", USER, USER, options)
+                        linuxConnection.setPrivate("#{path}/SearXNG.env", options)
+
+                        linuxConnection.upload(__dir__ + '/SearXNG.container', path, options)
+
+                        linuxConnection.reloadUserServices(USER, options)
+                        linuxConnection.restartUserService(USER, 'SearXNG', options)
+                    end
+                end
+            end
+
+            def cleanup(configs, state, context, options)
+                cleanupType(:SearXNG, configs, state, context, options) do |item, id, state, context, options, connection|
+                    Linux.withConnection(connection) do |linuxConnection|
+
+                        linuxConnection.stopUserService(USER, 'SearXNG', options)
+
+                        path = Podman.containersPath(HOME_DIR)
+                        linuxConnection.rm(path + '/SearXNG.container', options[:dry])
+
+                        state.item(id)['Status'] = State::STATUS_DELETED unless options[:dry]
+
+                        if options[:destroy]
+                            linuxConnection.deleteUserAndGroup(USER, options)
+                            state.item(id)['Status'] = State::STATUS_DESTROYED unless options[:dry]
+                        end
+                    end
+                end
+            end
+
+        end
+    end
+end
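Taken together, this plugin provisions a rootless Podman quadlet for SearXNG. A hypothetical .mm.yaml entry matching the keys it reads (Location, Settings, Valkey/Host, Valkey/SecretId; the id and hosts are invented):

    # Hypothetical example - not shipped with the gem
    Search:
      Type: SearXNG
      Location: ssh://search.example.org
      Settings:                        # merged into the bundled settings.yml
        server:                        # (a shallow Hash#merge!, so top-level keys replace defaults)
          base_url: https://search.example.org/
      Valkey:
        Host: valkey.example.org       # rewritten by Podman.updateHost for the container network
        SecretId: ValkeySecrets        # resolved via context.secrets.load(..., 'VALKEY_PASSWORD')

The generated SEARXNG_SECRET and optional SEARXNG_REDIS_URL land in SearXNG.env, which the SearXNG.container quadlet above loads via EnvironmentFile.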
data/Plugins/Apps/SearXNG/limiter.toml
ADDED
@@ -0,0 +1,40 @@
+[real_ip]
+
+# Number of values to trust for X-Forwarded-For.
+
+x_for = 1
+
+# The prefix defines the number of leading bits in an address that are compared
+# to determine whether or not an address is part of a (client) network.
+
+ipv4_prefix = 32
+ipv6_prefix = 48
+
+[botdetection.ip_limit]
+
+# To get unlimited access in a local network, by default link-local addresses
+# (networks) are not monitored by the ip_limit
+filter_link_local = false
+
+# activate link_token method in the ip_limit method
+link_token = false
+
+[botdetection.ip_lists]
+
+# In the limiter, the ip_lists method has priority over all other methods -> if
+# an IP is in the pass_ip list, it has unrestricted access and it is also not
+# checked if e.g. the "user agent" suggests a bot (e.g. curl).
+
+block_ip = [
+    # '93.184.216.34', # IPv4 of example.org
+    # '257.1.1.1', # invalid IP --> will be ignored, logged in ERROR class
+]
+
+pass_ip = [
+    # '192.168.0.0/16', # IPv4 private network
+    # 'fe80::/10' # IPv6 linklocal / wins over botdetection.ip_limit.filter_link_local
+]
+
+# Activate passlist of (hardcoded) IPs from the SearXNG organization,
+# e.g. `check.searx.space`.
+pass_searxng_org = true
data/Plugins/Apps/SigNoz/Config/alerts.yml
ADDED
@@ -0,0 +1,11 @@
+groups:
+  - name: ExampleCPULoadGroup
+    rules:
+      - alert: HighCpuLoad
+        expr: system_cpu_load_average_1m > 0.1
+        for: 0m
+        labels:
+          severity: warning
+        annotations:
+          summary: High CPU load
+          description: "CPU load is > 0.1\n  VALUE = {{ $value }}\n  LABELS = {{ $labels }}"
data/Plugins/Apps/SigNoz/Config/otel-collector-config.yaml
ADDED
@@ -0,0 +1,110 @@
+receivers:
+  otlp:
+    protocols:
+      grpc:
+        endpoint: 10.0.2.100:4317
+      http:
+        endpoint: 10.0.2.100:4318
+  prometheus:
+    config:
+      global:
+        scrape_interval: 60s
+      scrape_configs:
+        # otel-collector internal metrics
+        - job_name: otel-collector
+          static_configs:
+            - targets:
+                - 127.0.0.1:8888
+              labels:
+                job_name: otel-collector
+
+processors:
+  batch:
+    send_batch_size: 10000
+    send_batch_max_size: 11000
+    timeout: 10s
+  signozspanmetrics/delta:
+    metrics_exporter: clickhousemetricswrite
+    metrics_flush_interval: 60s
+    latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s]
+    dimensions_cache_size: 100000
+    aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
+    enable_exp_histogram: true
+    dimensions:
+      - name: service.namespace
+        default: default
+      - name: deployment.environment
+        default: default
+      # This is added to ensure the uniqueness of the timeseries
+      # Otherwise, identical timeseries produced by multiple replicas of
+      # collectors result in incorrect APM metrics
+      - name: signoz.collector.id
+      - name: service.version
+      - name: browser.platform
+      - name: browser.mobile
+      - name: k8s.cluster.name
+      - name: k8s.node.name
+      - name: k8s.namespace.name
+      - name: host.name
+      - name: host.type
+      - name: container.name
+
+extensions:
+  health_check:
+    endpoint: 10.0.2.100:13133
+  zpages:
+    endpoint: 10.0.2.100:55679
+  pprof:
+    endpoint: 10.0.2.100:1777
+
+exporters:
+  clickhousetraces:
+    datasource: tcp://10.0.2.2:19100/signoz_traces
+    low_cardinal_exception_grouping: false
+    use_new_schema: true
+  clickhousemetricswrite:
+    endpoint: tcp://10.0.2.2:19100/signoz_metrics
+    resource_to_telemetry_conversion:
+      enabled: true
+  clickhousemetricswrite/prometheus:
+    endpoint: tcp://10.0.2.2:19100/signoz_metrics
+  signozclickhousemetrics:
+    dsn: tcp://10.0.2.2:19100/signoz_metrics
+  clickhouselogsexporter:
+    dsn: tcp://10.0.2.2:19100/signoz_logs
+    timeout: 10s
+    use_new_schema: true
+  # logging: {}
+
+service:
+  telemetry:
+    logs:
+      encoding: json
+    metrics:
+      readers:
+        - pull:
+            exporter:
+              prometheus:
+                host: 127.0.0.1
+                port: 8888
+  extensions:
+    - health_check
+    - zpages
+    - pprof
+  pipelines:
+    traces:
+      receivers: [otlp]
+      processors: [signozspanmetrics/delta, batch]
+      exporters: [clickhousetraces]
+    metrics:
+      receivers: [otlp]
+      processors: [batch]
+      exporters: [clickhousemetricswrite, signozclickhousemetrics]
+    metrics/prometheus:
+      receivers: [prometheus]
+      processors: [batch]
+      exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics]
+    logs:
+      receivers: [otlp]
+      processors: [batch]
+      exporters: [clickhouselogsexporter]
data/Plugins/Apps/SigNoz/Config/otel-collector-opamp-config.yaml
ADDED
@@ -0,0 +1 @@
+server_endpoint: ws://10.0.2.2:4320/v1/opamp
data/Plugins/Apps/SigNoz/Config/prometheus.yml
ADDED
@@ -0,0 +1,18 @@
+# my global config
+global:
+  scrape_interval: 5s # Set the scrape interval to every 5 seconds. Default is every 1 minute.
+  evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
+  # scrape_timeout is set to the global default (10s).
+
+# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
+rule_files:
+  # - "first_rules.yml"
+  # - "second_rules.yml"
+  - 'alerts.yml'
+
+# A scrape configuration containing exactly one endpoint to scrape:
+# Here it's Prometheus itself.
+scrape_configs: []
+
+remote_read:
+  - url: tcp://10.0.2.2:19100/signoz_metrics
data/Plugins/Apps/SigNoz/SigNoz-Collector.container
ADDED
@@ -0,0 +1,23 @@
+
+[Unit]
+Description=OpenTelemetry Collector container
+After=local-fs.target
+
+[Container]
+ContainerName=SigNoz-Collector
+Image=docker.io/signoz/signoz-otel-collector:$VERSION
+Exec=--config=/etc/otel/otel-collector-config.yaml --copy-path=/var/tmp/collector-config.yaml --manager-config=/etc/otel/otel-collector-opamp-config.yaml
+Network=slirp4netns:allow_host_loopback=true
+PublishPort=127.0.0.1:4317:4317
+PublishPort=127.0.0.1:4318:4318
+Volume=/var/lib/signoz-collector/otel-collector-config.yaml:/etc/otel/otel-collector-config.yaml
+Volume=/var/lib/signoz-collector/otel-collector-opamp-config.yaml:/etc/otel/otel-collector-opamp-config.yaml
+LogDriver=journald
+AutoUpdate=registry
+
+[Service]
+TimeoutStartSec=6min
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target default.target
data/Plugins/Apps/SigNoz/SigNoz-Migrator.container
ADDED
@@ -0,0 +1,17 @@
+[Unit]
+Description=SigNoz Migrator container
+After=local-fs.target
+
+[Container]
+ContainerName=SigNoz-Migrator
+Image=docker.io/signoz/signoz-schema-migrator:$VERSION
+Exec=sync --cluster-name default --dsn=$DSN
+Network=slirp4netns:allow_host_loopback=true
+LogDriver=journald
+AutoUpdate=registry
+
+[Service]
+TimeoutStartSec=10min
+
+[Install]
+WantedBy=multi-user.target default.target
data/Plugins/Apps/SigNoz/SigNoz.conf.erb
ADDED
@@ -0,0 +1,61 @@
+
+upstream <%= config['Name'] %> {
+    server <%= config['Server'] %>;
+}
+
+server {
+    <% if config['NginxVersion'] >= 1.25 %>
+    listen 443 ssl;
+    listen [::]:443 ssl;
+    http2 on;
+    http3 on;
+    quic_retry on;
+    add_header Alt-Svc 'h3=":443"; ma=86400';
+    <% else %>
+    listen 443 ssl http2;
+    listen [::]:443 ssl http2;
+    <% end %>
+
+    include config-lmm/errors.conf;
+    include config-lmm/security.conf;
+    include config-lmm/ssl.conf;
+
+    server_name <%= config['Domain'] %>;
+
+    <% if config['CertName'] %>
+    ssl_certificate "/etc/letsencrypt/live/<%= config['CertName'] %>/fullchain.pem";
+    ssl_certificate_key "/etc/letsencrypt/live/<%= config['CertName'] %>/privkey.pem";
+    ssl_trusted_certificate "/etc/letsencrypt/live/<%= config['CertName'] %>/chain.pem";
+    <% end %>
+
+    client_max_body_size 24M;
+    large_client_header_buffers 8 128k;
+
+    location / {
+        proxy_pass http://<%= config['Name'] %>;
+        include config-lmm/proxy.conf;
+    }
+
+    location ~ ^/api/(v1|v3)/logs/(tail|livetail) {
+        proxy_pass http://<%= config['Name'] %>;
+
+        # connection will be closed if no data is read for 600s between successive read operations
+        proxy_read_timeout 600s;
+
+        # don't buffer the data, send it directly to the client
+        proxy_buffering off;
+        proxy_cache off;
+    }
+
+    location /api {
+        proxy_pass http://<%= config['Name'] %>/api;
+        # connection will be closed if no data is read for 600s between successive read operations
+        proxy_read_timeout 600s;
+    }
+
+    location /ws {
+        proxy_pass http://<%= config['Name'] %>/ws;
+        proxy_read_timeout 600s;
+    }
+
+}
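The ERB template above is rendered against a small config hash supplied by the SigNoz plugin. Illustrative values are sketched below; the names and addresses are assumptions, not the plugin's defaults, though the Server value mirrors the PublishPort in the SigNoz.container unit that follows:

    # Hypothetical values for the template's config hash
    Name: SigNoz                  # upstream block name
    Server: 127.0.0.1:18600       # where the SigNoz container publishes its UI/API
    Domain: signoz.example.org
    CertName: signoz.example.org
    NginxVersion: 1.25            # >= 1.25 selects the http2/http3 directive form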
data/Plugins/Apps/SigNoz/SigNoz.container
ADDED
@@ -0,0 +1,26 @@
+
+[Unit]
+Description=SigNoz container
+After=local-fs.target
+
+[Container]
+ContainerName=SigNoz
+Image=docker.io/signoz/signoz-community:$VERSION
+Exec=--use-logs-new-schema=true --use-trace-new-schema=true --cluster default
+EnvironmentFile=/var/lib/signoz/.config/containers/systemd/SigNoz.env
+Network=slirp4netns:allow_host_loopback=true
+PublishPort=127.0.0.1:4320:4320
+PublishPort=127.0.0.1:6060:6060
+PublishPort=127.0.0.1:18600:8080
+Volume=/var/lib/signoz/config:/root/config
+Volume=/var/lib/signoz/data:/var/lib/signoz
+LogDriver=journald
+AutoUpdate=registry
+
+[Service]
+TimeoutStartSec=6min
+TimeoutStopSec=3min
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target default.target