deleted coturn

see 2022-05-20 11:12:18 +02:00
parent 52d59f187c
commit ac7807bea4
10 changed files with 686 additions and 212 deletions

build.py (deleted)

@@ -1,49 +0,0 @@
from os import environ
from pybuilder.core import task, init
from ddadevops import *
import logging

name = 'c4k-coturn'
MODULE = 'docker'
PROJECT_ROOT_PATH = '../..'


class MyBuild(DevopsDockerBuild):
    pass


@init
def initialize(project):
    project.build_depends_on('ddadevops>=0.12.7')
    stage = 'notused'
    dockerhub_user = environ.get('DOCKERHUB_USER')
    if not dockerhub_user:
        dockerhub_user = gopass_field_from_path('meissa/web/docker.com', 'login')
    dockerhub_password = environ.get('DOCKERHUB_PASSWORD')
    if not dockerhub_password:
        dockerhub_password = gopass_password_from_path('meissa/web/docker.com')
    config = create_devops_docker_build_config(
        stage, PROJECT_ROOT_PATH, MODULE, dockerhub_user, dockerhub_password)
    build = MyBuild(project, config)
    build.initialize_build_dir()


@task
def image(project):
    build = get_devops_build(project)
    build.image()


@task
def drun(project):
    build = get_devops_build(project)
    build.drun()


@task
def test(project):
    build = get_devops_build(project)
    build.test()


@task
def publish(project):
    build = get_devops_build(project)
    build.dockerhub_login()
    build.dockerhub_publish()
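For context, these tasks are driven through PyBuilder's pyb CLI. A hypothetical session, assuming the ddadevops toolchain is installed and Docker Hub credentials come either from the environment or from gopass:

export DOCKERHUB_USER='someuser'       # optional; otherwise resolved via gopass
export DOCKERHUB_PASSWORD='somepass'   # optional; otherwise resolved via gopass
pyb image test                         # build the Docker image, then run its tests
pyb publish                            # dockerhub_login followed by dockerhub_publish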

Dockerfile (deleted)

@@ -1,7 +0,0 @@
FROM coturn/coturn:4.5.2-r11
# Prepare Configuration
ADD resources /tmp
RUN /tmp/install.sh
ENTRYPOINT ["/entrypoint.sh"]
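A minimal sketch for building this image and inspecting it locally (the tag name is an assumption; note the entrypoint script in this setup is still a stub):

docker build -t c4k-coturn .
docker run --rm -it --entrypoint /bin/bash c4k-coturn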

entrypoint.sh (deleted)

@@ -1,2 +0,0 @@
#!/bin/bash

install-debug.sh (deleted)

@@ -1,3 +0,0 @@
#!/bin/bash
apt update && apt -qqy install vim bash-completion less

install.sh (deleted)

@@ -1,8 +0,0 @@
#!/bin/bash
set -Eeo pipefail
apt update > /dev/null
install -m 0700 /tmp/install-debug.sh /usr/local/bin/
install -m 0600 /tmp/turnserver.conf /etc/coturn/turnserver.conf
install -m 0700 /tmp/entrypoint.sh /entrypoint.sh

turnserver.conf (deleted)

@@ -1,70 +0,0 @@
# location of example configuration
## https://github.com/coturn/coturn/blob/master/docker/coturn/
## https://github.com/coturn/coturn/blob/master/README.turnserver - Line 666 for TURN REST API
listening-port=3478
tls-listening-port=5349
#listening-ip=<yourIP> # automatically detected
#relay-ip=<yourIP> # automatically detected
fingerprint
#use-auth-secret # Authentication Issue
#static-auth-secret=<yourSecret> # Authentication Issue
#realm=stun.kuketz-meet.de # Automatically detected
total-quota=100
bps-capacity=0
no-udp #??
no-tcp #??
# stale-nonce=600 # Authentication Issue - when to force re-authentication in seconds
cert=/etc/ssl/certs/stun.kuketz-meet_ecdsa.pem # same as jitsi?
pkey=/etc/ssl/private/stun.kuketz-meet_ecdsa.key # same as jitsi?
cipher-list="ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256"
ec-curve-name=secp384r1
# dh-file=/etc/ssl/certs/dhparam.pem # should we use custom DH TLS key?
no-stdout-log
log-file=/var/log/coturn.log
# simple-log # We want logs with pid and date in the name
no-multicast-peers
# cli-port=5766 # do we want cli access?
# cli-password=SOME_SALTED_PW # do we want a cli password?
no-tlsv1
no-tlsv1_1
# Authentication Issue -> restrict access to the Jitsi server only?
## Authentication mechanisms:
## no-auth [default] - allows anonymous access
## lt-cred-mech - long-term credential mechanism, do not use with use-auth-secret
## oauth - enables oAuth support
## server-name= - sets the server name used for oAuth
## [define a user in the config (this file), on the command line, or in a userdb file] - sets lt-cred-mech as default
### user=username1:key1
### or
### user=username1:password1
### Keys must be generated by the turnadmin utility. The key value depends
### on the user name, realm, and password:
###
### Example:
### $ turnadmin -k -u ninefingers -r north.gov -p youhavetoberealistic
### Output: 0xbc807ee29df3c9ffa736523fb2c4e8ee
### ('0x' at the beginning of the key is what differentiates a key from a
### password: if it starts with 0x it is a key, otherwise it is a password).
###
### The corresponding user account entry in the config file will be:
###
### user=ninefingers:0xbc807ee29df3c9ffa736523fb2c4e8ee
### Or, equivalently, with the password in the clear (less secure):
### user=ninefingers:youhavetoberealistic
## use-auth-secret - sets the TURN REST API flag
### This option is used with timestamp:
### usercombo -> "timestamp:userid"
### turn user -> usercombo
### turn password -> base64(hmac(secret key, usercombo))
## we can use a database for storing users and secrets for the TURN REST API
# CLI Security
## do we want a password?
# TODO import from env variables
# TODO find paths for cert and pkey
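The use-auth-secret flow sketched in the comments above can be reproduced by hand; a minimal sketch, assuming coturn's default HMAC-SHA1 and placeholder values for the secret and user id:

secret='yourSecret'                            # must match static-auth-secret
username="$(( $(date +%s) + 86400 )):alice"    # usercombo -> "timestamp:userid", valid 24h here
password=$(printf '%s' "$username" | openssl dgst -sha1 -hmac "$secret" -binary | base64)
# present $username/$password as TURN credentials; coturn recomputes and compares the HMAC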

jitsi.yaml (new file, 233 lines)

@@ -0,0 +1,233 @@
kind: Ingress
apiVersion: networking.k8s.io/v1
metadata:
  name: jitsi
  annotations:
    cert-manager.io/cluster-issuer: letsencrypt-staging-issuer
    ingress.kubernetes.io/ssl-redirect: 'true'
    kubernetes.io/ingress.class: ''
spec:
  tls:
  - hosts:
    - fqdn
    secretName: tls-jitsi
  rules:
  - host: jitsi.test.meissa-gmbh.de
    http:
      paths:
      - path: /
        pathType: Prefix
        backend:
          service:
            name: web
            port:
              number: 80
---
apiVersion: v1
kind: Secret
metadata:
  name: jitsi-config
type: Opaque
data:
  JVB_AUTH_PASSWORD: SnZiQXV0aA==
  JICOFO_AUTH_PASSWORD: Smljb2ZvQXV0aA==
  JICOFO_COMPONENT_SECRET: Smljb2ZvQ29tcFNlYw==
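  # Note: these values are plain base64, not encryption; for example
  #   printf '%s' 'JvbAuth' | base64   # -> SnZiQXV0aA==
  # so real deployments should generate per-instance secrets.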
---
apiVersion: v1
kind: Service
metadata:
  labels:
    service: jvb
  name: jvb-udp
spec:
  type: NodePort
  externalTrafficPolicy: Cluster
  ports:
  - port: 30300
    protocol: UDP
    targetPort: 30300
    nodePort: 30300
  selector:
    app: jitsi
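# A rough reachability smoke test for the UDP media port above (hypothetical
# host; nc only reports the port closed if an ICMP unreachable comes back):
#   nc -vzu jitsi.test.meissa-gmbh.de 30300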
---
apiVersion: v1
kind: Service
metadata:
  labels:
    service: web
  name: web
spec:
  ports:
  - name: http
    port: 80
    targetPort: 80
  - name: https
    port: 443
    targetPort: 443
  selector:
    app: jitsi
---
apiVersion: apps/v1
kind: Deployment
metadata:
  labels:
    app: jitsi
  name: jitsi
spec:
  strategy:
    type: Recreate
  selector:
    matchLabels:
      app: jitsi
  template:
    metadata:
      labels:
        app: jitsi
    spec:
      containers:
      - name: jicofo
        image: jitsi/jicofo:stable-6826
        imagePullPolicy: IfNotPresent
        env:
        - name: XMPP_SERVER
          value: localhost
        - name: XMPP_DOMAIN
          value: meet.jitsi
        - name: XMPP_AUTH_DOMAIN
          value: auth.meet.jitsi
        - name: XMPP_MUC_DOMAIN
          value: muc.meet.jitsi
        - name: XMPP_INTERNAL_MUC_DOMAIN
          value: internal-muc.meet.jitsi
        - name: JICOFO_COMPONENT_SECRET
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JICOFO_COMPONENT_SECRET
        - name: JICOFO_AUTH_USER
          value: focus
        - name: JICOFO_AUTH_PASSWORD
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JICOFO_AUTH_PASSWORD
        - name: TZ
          value: Europe/Berlin
        - name: JVB_BREWERY_MUC
          value: jvbbrewery
      - name: prosody
        image: jitsi/prosody:stable-6826
        imagePullPolicy: IfNotPresent
        env:
        - name: PUBLIC_URL
          value: jitsi.test.meissa-gmbh.de
        - name: XMPP_DOMAIN
          value: meet.jitsi
        - name: XMPP_AUTH_DOMAIN
          value: auth.meet.jitsi
        - name: XMPP_MUC_DOMAIN
          value: muc.meet.jitsi
        - name: XMPP_INTERNAL_MUC_DOMAIN
          value: internal-muc.meet.jitsi
        - name: JICOFO_COMPONENT_SECRET
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JICOFO_COMPONENT_SECRET
        - name: JVB_AUTH_USER
          value: jvb
        - name: JVB_AUTH_PASSWORD
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JVB_AUTH_PASSWORD
        - name: JICOFO_AUTH_USER
          value: focus
        - name: JICOFO_AUTH_PASSWORD
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JICOFO_AUTH_PASSWORD
        - name: TZ
          value: Europe/Berlin
        - name: JVB_TCP_HARVESTER_DISABLED
          value: 'true'
      - name: web
        image: domaindrivenarchitecture/c4k-jitsi
        imagePullPolicy: IfNotPresent
        env:
        - name: PUBLIC_URL
          value: jitsi.test.meissa-gmbh.de
        - name: XMPP_SERVER
          value: localhost
        - name: JICOFO_AUTH_USER
          value: focus
        - name: XMPP_DOMAIN
          value: meet.jitsi
        - name: XMPP_AUTH_DOMAIN
          value: auth.meet.jitsi
        - name: XMPP_INTERNAL_MUC_DOMAIN
          value: internal-muc.meet.jitsi
        - name: XMPP_BOSH_URL_BASE
          value: http://127.0.0.1:5280
        - name: XMPP_MUC_DOMAIN
          value: muc.meet.jitsi
        - name: TZ
          value: Europe/Berlin
        - name: JVB_TCP_HARVESTER_DISABLED
          value: 'true'
        - name: DEFAULT_LANGUAGE
          value: de
        - name: RESOLUTION
          value: '480'
        - name: RESOLUTION_MIN
          value: '240'
        - name: RESOLUTION_WIDTH
          value: '853'
        - name: RESOLUTION_WIDTH_MIN
          value: '427'
        - name: DISABLE_AUDIO_LEVELS
          value: 'true'
      - name: jvb
        image: jitsi/jvb:stable-6826
        imagePullPolicy: IfNotPresent
        env:
        - name: XMPP_SERVER
          value: localhost
        - name: DOCKER_HOST_ADDRESS
          value: localhost
        - name: XMPP_DOMAIN
          value: meet.jitsi
        - name: XMPP_AUTH_DOMAIN
          value: auth.meet.jitsi
        - name: XMPP_INTERNAL_MUC_DOMAIN
          value: internal-muc.meet.jitsi
        - name: JVB_STUN_SERVERS
          value: stun.1und1.de:3478,stun.t-online.de:3478,stun.hosteurope.de:3478
        - name: JICOFO_AUTH_USER
          value: focus
        - name: JVB_TCP_HARVESTER_DISABLED
          value: 'true'
        - name: JVB_AUTH_USER
          value: jvb
        - name: JVB_PORT
          value: '30300'
        - name: JVB_AUTH_PASSWORD
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JVB_AUTH_PASSWORD
        - name: JICOFO_AUTH_PASSWORD
          valueFrom:
            secretKeyRef:
              name: jitsi-config
              key: JICOFO_AUTH_PASSWORD
        - name: JVB_BREWERY_MUC
          value: jvbbrewery
        - name: TZ
          value: Europe/Berlin
provisioning log (new file, 453 lines)

@@ -0,0 +1,453 @@
14:44:10.320 [main] INFO o.d.p.f.c.processors.RemoteProcessor - Connecting to jitsi.test.meissa-gmbh.de/49.12.243.171 with user: root with ssh-key
14:44:10.717 [main] INFO c.h.s.u.k.OpenSSHKeyV1KeyFile - Read key type: ssh-rsa
14:44:10.815 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:12.201 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "timeout 1 sudo id"
14:44:12.310 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: uid=0(root) gid=0(root) groups=0(root)
, [/bin/bash, -c, timeout 1 sudo id]
14:44:12.385 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "test -e /etc/netplan/99-loopback.yaml"
14:44:12.443 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, test -e /etc/netplan/99-loopback.yaml]
14:44:12.460 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/netplan/99-loopback.yaml"
14:44:12.532 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/netplan/99-loopback.yaml]
14:44:12.534 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:12.607 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "netplan apply""
14:44:13.851 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "netplan apply"]
14:44:13.852 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "test -e /etc/rancher/k3s/config.yaml"
14:44:13.911 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, test -e /etc/rancher/k3s/config.yaml]
14:44:13.913 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /etc/kubernetes/""
14:44:13.981 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /etc/kubernetes/"]
14:44:13.983 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /etc/kubernetes/""
14:44:14.055 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /etc/kubernetes/"]
14:44:14.056 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /var/lib/rancher/k3s/server/manifests/""
14:44:14.127 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /var/lib/rancher/k3s/server/manifests/"]
14:44:14.128 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /var/lib/rancher/k3s/server/manifests/""
14:44:14.197 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /var/lib/rancher/k3s/server/manifests/"]
14:44:14.198 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /etc/rancher/k3s/manifests/""
14:44:14.267 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /etc/rancher/k3s/manifests/"]
14:44:14.267 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /etc/rancher/k3s/manifests/""
14:44:14.337 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /etc/rancher/k3s/manifests/"]
14:44:14.338 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /var/pvc1""
14:44:14.400 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /var/pvc1"]
14:44:14.401 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc1""
14:44:14.470 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc1"]
14:44:14.470 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /var/pvc2""
14:44:14.537 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /var/pvc2"]
14:44:14.537 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc2""
14:44:14.608 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc2"]
14:44:14.612 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/config.yaml"
14:44:14.684 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/config.yaml]
14:44:14.684 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:14.757 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 755 /dev/null /usr/local/bin/k3s-install.sh"
14:44:14.827 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 755 /dev/null /usr/local/bin/k3s-install.sh]
14:44:14.827 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:14.936 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "INSTALL_K3S_CHANNEL=latest k3s-install.sh"
14:44:47.826 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: [INFO] Finding release for channel latest
[INFO] Using v1.23.5+k3s1 as release
[INFO] Downloading hash https://github.com/k3s-io/k3s/releases/download/v1.23.5+k3s1/sha256sum-amd64.txt
[INFO] Downloading binary https://github.com/k3s-io/k3s/releases/download/v1.23.5+k3s1/k3s
[INFO] Verifying binary download
[INFO] Installing k3s to /usr/local/bin/k3s
[INFO] Skipping installation of SELinux RPM
[INFO] Creating /usr/local/bin/kubectl symlink to k3s
[INFO] Creating /usr/local/bin/crictl symlink to k3s
[INFO] Creating /usr/local/bin/ctr symlink to k3s
[INFO] Creating killall script /usr/local/bin/k3s-killall.sh
[INFO] Creating uninstall script /usr/local/bin/k3s-uninstall.sh
[INFO] env: Creating environment file /etc/systemd/system/k3s.service.env
[INFO] systemd: Creating service file /etc/systemd/system/k3s.service
[INFO] systemd: Enabling k3s unit
[INFO] systemd: Starting k3s
, Err: Created symlink /etc/systemd/system/multi-user.target.wants/k3s.service → /etc/systemd/system/k3s.service.
[/bin/bash, -c, INSTALL_K3S_CHANNEL=latest k3s-install.sh]
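The channel-based install above resolved to v1.23.5+k3s1 at run time; for reproducible provisioning, the release could instead be pinned via the installer's standard variable — a sketch:

curl -sfL https://get.k3s.io | INSTALL_K3S_VERSION='v1.23.5+k3s1' sh -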
14:44:47.829 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-namespace.yaml"
14:44:47.889 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-namespace.yaml]
14:44:47.889 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:47.955 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-namespace.yaml""
14:44:49.360 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: namespace/metallb-system created
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-namespace.yaml"]
14:44:49.363 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml"
14:44:49.426 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml]
14:44:49.426 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:49.495 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml""
14:44:50.193 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: podsecuritypolicy.policy/controller created
podsecuritypolicy.policy/speaker created
serviceaccount/controller created
serviceaccount/speaker created
clusterrole.rbac.authorization.k8s.io/metallb-system:controller created
clusterrole.rbac.authorization.k8s.io/metallb-system:speaker created
role.rbac.authorization.k8s.io/config-watcher created
role.rbac.authorization.k8s.io/pod-lister created
role.rbac.authorization.k8s.io/controller created
clusterrolebinding.rbac.authorization.k8s.io/metallb-system:controller created
clusterrolebinding.rbac.authorization.k8s.io/metallb-system:speaker created
rolebinding.rbac.authorization.k8s.io/config-watcher created
rolebinding.rbac.authorization.k8s.io/pod-lister created
rolebinding.rbac.authorization.k8s.io/controller created
daemonset.apps/speaker created
deployment.apps/controller created
, Err: Warning: policy/v1beta1 PodSecurityPolicy is deprecated in v1.21+, unavailable in v1.25+
[/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml"]
14:44:50.196 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-config.yaml"
14:44:50.259 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-config.yaml]
14:44:50.259 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:50.331 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-config.yaml""
14:44:50.897 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: configmap/config created
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-config.yaml"]
14:44:50.900 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/traefik.yaml"
14:44:50.967 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/traefik.yaml]
14:44:50.968 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:51.038 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/traefik.yaml""
14:44:51.614 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: helmchart.helm.cattle.io/traefik-crd created
helmchart.helm.cattle.io/traefik created
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/traefik.yaml"]
14:44:51.615 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml"
14:44:51.674 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml]
14:44:51.675 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:51.739 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml""
14:44:52.331 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: configmap/local-path-config configured
, Err: Warning: resource configmaps/local-path-config is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
[/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml"]
14:44:52.332 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "kubectl set env deployment -n kube-system local-path-provisioner DEPLOY_DATE="$(date)""
14:44:52.844 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: deployment.apps/local-path-provisioner env updated
, [/bin/bash, -c, kubectl set env deployment -n kube-system local-path-provisioner DEPLOY_DATE="$(date)"]
14:44:52.845 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "ln -sf /etc/rancher/k3s/k3s.yaml /etc/kubernetes/admin.conf""
14:44:52.910 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "ln -sf /etc/rancher/k3s/k3s.yaml /etc/kubernetes/admin.conf"]
14:44:52.933 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/cert-manager.yaml"
14:44:53.000 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/cert-manager.yaml]
14:44:53.002 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.137 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.233 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.327 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.414 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.522 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.615 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.709 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.803 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:53.903 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.000 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.093 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.194 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.294 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.390 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.485 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.579 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.676 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.773 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.870 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:54.965 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.058 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.144 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.241 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.341 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.451 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.555 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.656 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.751 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:44:55.844 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/cert-manager.yaml""
14:45:00.063 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created
namespace/cert-manager created
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
clusterrole.rbac.authorization.k8s.io/cert-manager-view created
clusterrole.rbac.authorization.k8s.io/cert-manager-edit created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
role.rbac.authorization.k8s.io/cert-manager:leaderelection created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/cert-manager.yaml"]
14:45:00.066 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/le-issuer.yaml"
14:45:00.128 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/le-issuer.yaml]
14:45:00.128 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:45:00.188 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml""
14:45:02.889 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, Err: Error from server (InternalError): error when creating "/etc/rancher/k3s/manifests/le-issuer.yaml": Internal error occurred: failed calling webhook "webhook.cert-manager.io": failed to call webhook: Post "https://cert-manager-webhook.cert-manager.svc:443/mutate?timeout=10s": dial tcp 10.43.225.199:443: connect: connection refused
[/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml"]
14:45:12.914 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml""
14:45:21.515 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: clusterissuer.cert-manager.io/staging created
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml"]
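The failed apply at 14:45:02 is cert-manager's webhook not serving yet; the tool evidently waits ~10 s and retries the same command, which done by hand would look roughly like:

until sudo kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml; do
  sleep 10   # give the cert-manager-webhook deployment time to become ready
done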
14:45:21.519 [main] INFO o.d.p.f.c.processors.LocalProcessor - os.name: Linux
14:45:21.519 [main] INFO o.d.p.f.c.processors.LocalProcessor - user.home: /home/erik
14:45:21.519 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "xxxxxxxx"
14:45:21.530 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "xxxxxxxx"
14:45:21.542 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "wc -c < /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml"
14:45:21.586 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: 5794
, [/bin/bash, -c, wc -c < /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml]
14:45:21.587 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cat /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml"
14:45:21.592 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: [jitsi.yaml contents, identical to the manifest shown above]
, [/bin/bash, -c, cat /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml]
14:45:21.593 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/application.yaml"
14:45:21.655 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/application.yaml]
14:45:21.656 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
14:45:21.739 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/application.yaml""
14:45:23.388 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: ingress.networking.k8s.io/jitsi created
secret/jitsi-config created
service/jvb-udp created
service/web created
deployment.apps/jitsi created
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/application.yaml"]
14:45:23.389 [main] INFO o.d.p.f.c.processors.RemoteProcessor - Disconnecting ssh.

coturn DaemonSet manifest (deleted)

@@ -1,42 +0,0 @@
apiVersion: apps/v1
kind: DaemonSet
metadata:
  namespace: coturn
  name: coturn
  labels:
    app.kubernetes.io/name: coturn
    app.kubernetes.io/instance: coturn
spec:
  replicas: 1
  selector:
    matchLabels:
      app.kubernetes.io/name: coturn
      app.kubernetes.io/instance: coturn
  template:
    metadata:
      labels:
        app.kubernetes.io/name: coturn
        app.kubernetes.io/instance: coturn
    spec:
      hostNetwork: true
      containers:
      - name: coturn
        image: coturn/coturn:4.5.2-r11
        imagePullPolicy: IfNotPresent
        ports:
        - name: turn-port1-udp
          containerPort: 3478
          hostPort: 3478
          protocol: UDP
        - name: turn-port1-tcp
          containerPort: 3478
          hostPort: 3478
          protocol: TCP
        - name: turn-port2-udp
          containerPort: 5349
          hostPort: 5349
          protocol: UDP
        - name: turn-port2-tcp
          containerPort: 5349
          hostPort: 5349
          protocol: TCP
coturn Service manifest (deleted)

@@ -1,31 +0,0 @@
apiVersion: v1
kind: Service
metadata:
  name: coturn
  namespace: coturn
  labels:
    app.kubernetes.io/name: coturn
    app.kubernetes.io/instance: coturn
spec:
  type: ClusterIP
  ports:
  - port: 3478
    targetPort: 3478
    protocol: UDP
    name: turn-port1-udp
  - port: 3478
    targetPort: 3478
    protocol: TCP
    name: turn-port1-tcp
  - port: 5349
    targetPort: 5349
    protocol: UDP
    name: turn-port2-udp
  - port: 5349
    targetPort: 5349
    protocol: TCP
    name: turn-port2-tcp
  selector:
    app.kubernetes.io/name: coturn
    app.kubernetes.io/instance: coturn