Merge branch 'main' of ssh://repo.prod.meissa.de:2222/meissa/c4k-jitsi
commit
eec7bfa37c
@ -1,11 +0,0 @@
|
||||
FROM c4k-jitsi
|
||||
|
||||
RUN apt update
|
||||
RUN apt -yqq --no-install-recommends --yes install curl default-jre-headless
|
||||
|
||||
RUN curl -L -o /tmp/serverspec.jar \
|
||||
https://github.com/DomainDrivenArchitecture/dda-serverspec-crate/releases/download/1.3.4/dda-serverspec-standalone.jar
|
||||
|
||||
COPY serverspec.edn /tmp/serverspec.edn
|
||||
|
||||
RUN java -jar /tmp/serverspec.jar /tmp/serverspec.edn -v
|
@ -1,2 +0,0 @@
|
||||
{:file [{:path "/usr/local/bin/install-debug.sh" :mod "700"}
|
||||
{:path "/defaults/settings-config.js" :mod "644"}]}
|
@ -0,0 +1,57 @@
|
||||
from os import environ
|
||||
from datetime import datetime
|
||||
from pybuilder.core import task, init
|
||||
from ddadevops import *
|
||||
|
||||
# Project name; with image_naming NAME_AND_MODULE this is the image name prefix.
name = "c4k-jitsi"
# Sub-module built here; appended to `name` for the resulting image name.
MODULE = "excalidraw-backend"
# Path from this build file back to the repository root.
PROJECT_ROOT_PATH = "../.."
# Release version; "dev" versions get a timestamp suffix in initialize().
version = "1.5.2-SNAPSHOT"
|
||||
|
||||
|
||||
@init
def initialize(project):
    """Configure the pybuilder project for the ddadevops image build.

    Derives the image tag from the module-level `version` (appending a
    timestamp for dev versions so repeated pushes do not overwrite each
    other), declares the build-time dependency on ddadevops, and
    prepares the image build directory.

    :param project: pybuilder project instance injected by @init
    """
    image_tag = version
    if "dev" in image_tag:
        # unique, time-based tag for dev builds
        image_tag += datetime.now().strftime("%Y-%m-%d-%H-%M-%S")

    # renamed from `input` to avoid shadowing the Python builtin;
    # f"{image_tag}" was a no-op wrapper around a str and is dropped
    build_config = {
        "name": name,
        "module": MODULE,
        "stage": "notused",
        "project_root_path": PROJECT_ROOT_PATH,
        "build_types": ["IMAGE"],
        "mixin_types": [],
        "image_naming": "NAME_AND_MODULE",
        "image_tag": image_tag,
    }

    project.build_depends_on("ddadevops>=4.7.0")

    build = DevopsImageBuild(project, build_config)
    build.initialize_build_dir()
|
||||
|
||||
|
||||
@task
def image(project):
    """Build the container image through the ddadevops build object."""
    get_devops_build(project).image()
|
||||
|
||||
|
||||
@task
def drun(project):
    """Run the built image via the ddadevops build's drun step."""
    get_devops_build(project).drun()
|
||||
|
||||
|
||||
@task
def test(project):
    """Execute the ddadevops test step for this build."""
    get_devops_build(project).test()
|
||||
|
||||
|
||||
@task
def publish(project):
    """Log in to Docker Hub, then push the built image."""
    devops_build = get_devops_build(project)
    devops_build.dockerhub_login()
    devops_build.dockerhub_publish()
|
@ -0,0 +1,14 @@
|
||||
# Taken from: https://github.com/jitsi/excalidraw-backend
# Builds the excalidraw collaboration backend (TypeScript, compiled at
# image build time and again on container start via `npm start`).
FROM node:16.17-slim

WORKDIR /excalidraw-backend

# NOTE(review): COPY of a directory (resources/src) copies its *contents*
# into ./, not the directory itself — confirm tsconfig's outDir/include
# still resolve the sources as intended.
COPY resources/package.json resources/package-lock.json resources/tsconfig.json resources/src ./
RUN npm install
RUN npm run build

# 80: HTTP + socket.io; 9090: prometheus metrics endpoint
EXPOSE 80
EXPOSE 9090

# package.json "start" runs tsc and then node dist/index.js
CMD ["npm", "start"]
|
@ -0,0 +1 @@
|
||||
!package-lock.json
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,52 @@
|
||||
{
|
||||
"name": "excalidraw-backend",
|
||||
"version": "1.0.0",
|
||||
"main": "src/index.js",
|
||||
"description": "Excalidraw backend",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/jitsi/excalidraw-backend"
|
||||
},
|
||||
"private": true,
|
||||
"engines": {
|
||||
"node": ">=14.0.0",
|
||||
"npm": ">=7.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/debug": "4.1.5",
|
||||
"@types/express": "4.17.11",
|
||||
"@types/node": "14.14.31",
|
||||
"@types/socket.io": "2.1.4",
|
||||
"cross-env": "^7.0.3",
|
||||
"debug": "4.3.1",
|
||||
"dotenv": "^10.0.0",
|
||||
"express": "4.17.1",
|
||||
"socket.io": "^2.5.0",
|
||||
"socket.io-prometheus-metrics": "^1.0.6",
|
||||
"ts-node-dev": "^1.1.8",
|
||||
"typescript": "4.2.3"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"lint": "eslint .",
|
||||
"lint-fix": "eslint . --fix",
|
||||
"start": "tsc && node dist/index.js",
|
||||
"start:local": "tsc && DEBUG='engine,app,socket.io:client,server' node dist/index.js",
|
||||
"start:dev": "cross-env NODE_ENV=development ts-node-dev --respawn --transpile-only src/index.ts"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@jitsi/eslint-config": "^4.1.0",
|
||||
"@types/dotenv": "^8.2.0",
|
||||
"@typescript-eslint/eslint-plugin": "5.30.5",
|
||||
"@typescript-eslint/parser": "5.30.4",
|
||||
"eslint": "8.1.0",
|
||||
"eslint-plugin-import": "2.25.2",
|
||||
"eslint-plugin-jsdoc": "37.0.3",
|
||||
"eslint-plugin-typescript-sort-keys": "^2.1.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"bufferutil": "^4.0.6",
|
||||
"utf-8-validate": "^5.0.9"
|
||||
}
|
||||
}
|
@ -0,0 +1,7 @@
|
||||
// ESLint configuration: compose the shared Jitsi presets
// (base rules, JSDoc rules, and TypeScript rules).
module.exports = {
    extends: [
        '@jitsi/eslint-config',
        '@jitsi/eslint-config/jsdoc',
        '@jitsi/eslint-config/typescript',
    ],
};
|
@ -0,0 +1,107 @@
|
||||
// Taken from: https://github.com/jitsi/excalidraw-backend
// Collaboration backend: an Express HTTP server plus a socket.io (v2)
// layer that relays encrypted drawing payloads between room members.

import debug from 'debug';
import dotenv from 'dotenv';
import express from 'express';
import http from 'http';
import socketIO from 'socket.io';
import * as prometheus from 'socket.io-prometheus-metrics';

// namespaced logger; enable with DEBUG=server
const serverDebug = debug('server');

// load the stage-specific dotenv file
dotenv.config(
    process.env.NODE_ENV === 'development'
        ? { path: '.env.development' }
        : { path: '.env.production' }
);

const app = express();
const port = process.env.PORT || 80; // default port to listen

// liveness endpoint
app.get('/', (req, res) => {
    res.send('Excalidraw backend is up :)');
});

const server = http.createServer(app);

server.listen(port, () => {
    serverDebug(`listening on port: ${port}`);
});

const io = socketIO(server, {
    // answer CORS preflight requests ourselves
    handlePreflightRequest: (req, res) => {
        const headers = {
            'Access-Control-Allow-Headers': 'Content-Type, Authorization',
            // NOTE(review): `req.header` looks like it should be `req.headers`
            // (raw http.IncomingMessage has no `header` property), so the
            // fallback origin would always be used — confirm intent.
            'Access-Control-Allow-Origin': req.header?.origin ?? 'https://meet.jit.si',
            'Access-Control-Allow-Credentials': true
        };

        res.writeHead(200, headers);
        res.end();
    },
    maxHttpBufferSize: 10e6,
    pingTimeout: 10000
});

// listens on host:9090/metrics
prometheus.metrics(io, {
    collectDefaultMetrics: true
});

io.on('connection', socket => {
    serverDebug(`connection established! ${socket.conn.request.url}`);
    io.to(`${socket.id}`).emit('init-room');

    // a client asks to join a room; tell it whether it is alone and
    // broadcast the updated member list (socket.io v2 adapter internals)
    socket.on('join-room', roomID => {
        serverDebug(`${socket.id} has joined ${roomID} for url ${socket.conn.request.url}`);
        socket.join(roomID);
        if (io.sockets.adapter.rooms[roomID].length <= 1) {
            io.to(`${socket.id}`).emit('first-in-room');
        } else {
            socket.broadcast.to(roomID).emit('new-user', socket.id);
        }
        io.in(roomID).emit(
            'room-user-change',
            Object.keys(io.sockets.adapter.rooms[roomID].sockets)
        );
    });

    // relay an encrypted payload to the other members of the room
    socket.on(
        'server-broadcast',
        (roomID: string, encryptedData: ArrayBuffer, iv: Uint8Array) => {
            socket.broadcast.to(roomID).emit('client-broadcast', encryptedData, iv);
        }
    );

    // same relay, but volatile: the emit may be dropped under backpressure
    socket.on(
        'server-volatile-broadcast',
        (roomID: string, encryptedData: ArrayBuffer, iv: Uint8Array) => {
            socket.volatile.broadcast
                .to(roomID)
                .emit('client-broadcast', encryptedData, iv);
        }
    );

    // before the socket leaves its rooms, announce the departure and the
    // remaining member list to each room (skipping the socket's own room)
    socket.on('disconnecting', () => {
        const rooms = io.sockets.adapter.rooms;

        for (const roomID of Object.keys(socket.rooms)) {
            const clients = Object.keys(rooms[roomID].sockets).filter(id => id !== socket.id);

            if (roomID !== socket.id) {
                socket.to(roomID).emit('user has left', socket.id);
            }

            if (clients.length > 0) {
                socket.broadcast.to(roomID).emit('room-user-change', clients);
            }
        }
    });

    socket.on('disconnect', (reason, details) => {
        serverDebug(
            `${socket.id} was disconnected from url ${socket.conn.request.url} for the following reason: ${reason}
            ${JSON.stringify(details)}`
        );
        socket.removeAllListeners();
    });
});
|
@ -0,0 +1,17 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"target": "es5",
|
||||
"lib": ["dom", "dom.iterable", "esnext"],
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"esModuleInterop": true,
|
||||
"strict": true,
|
||||
"moduleResolution": "Node",
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"outDir": "dist"
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
FROM jitsi/web:stable-8719
|
||||
FROM jitsi/web:stable-8922-1
|
||||
|
||||
# Prepare Configuration
|
||||
ADD resources /tmp
|
@ -1,233 +0,0 @@
|
||||
kind: Ingress
|
||||
apiVersion: networking.k8s.io/v1
|
||||
metadata:
|
||||
name: jitsi
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt-staging-issuer
|
||||
ingress.kubernetes.io/ssl-redirect: 'true'
|
||||
kubernetes.io/ingress.class: ''
|
||||
spec:
|
||||
tls:
|
||||
- hosts:
|
||||
- fqdn
|
||||
secretName: tls-jitsi
|
||||
rules:
|
||||
- host: jitsi.test.meissa-gmbh.de
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: web
|
||||
port:
|
||||
number: 80
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: jitsi-config
|
||||
type: Opaque
|
||||
data:
|
||||
JVB_AUTH_PASSWORD: SnZiQXV0aA==
|
||||
JICOFO_AUTH_PASSWORD: Smljb2ZvQXV0aA==
|
||||
JICOFO_COMPONENT_SECRET: Smljb2ZvQ29tcFNlYw==
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
service: jvb
|
||||
name: jvb-udp
|
||||
spec:
|
||||
type: NodePort
|
||||
externalTrafficPolicy: Cluster
|
||||
ports:
|
||||
- port: 30300
|
||||
protocol: UDP
|
||||
targetPort: 30300
|
||||
nodePort: 30300
|
||||
selector:
|
||||
app: jitsi
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
service: web
|
||||
name: web
|
||||
spec:
|
||||
ports:
|
||||
- name: http
|
||||
port: 80
|
||||
targetPort: 80
|
||||
- name: https
|
||||
port: 443
|
||||
targetPort: 443
|
||||
selector:
|
||||
app: jitsi
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app: jitsi
|
||||
name: jitsi
|
||||
spec:
|
||||
strategy:
|
||||
type: Recreate
|
||||
selector:
|
||||
matchLabels:
|
||||
app: jitsi
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: jitsi
|
||||
spec:
|
||||
containers:
|
||||
- name: jicofo
|
||||
image: jitsi/jicofo:stable-6826
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: XMPP_SERVER
|
||||
value: localhost
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_MUC_DOMAIN
|
||||
value: muc.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: JICOFO_COMPONENT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_COMPONENT_SECRET
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: JICOFO_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_AUTH_PASSWORD
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
- name: JVB_BREWERY_MUC
|
||||
value: jvbbrewery
|
||||
- name: prosody
|
||||
image: jitsi/prosody:stable-6826
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: PUBLIC_URL
|
||||
value: jitsi.test.meissa-gmbh.de
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_MUC_DOMAIN
|
||||
value: muc.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: JICOFO_COMPONENT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_COMPONENT_SECRET
|
||||
- name: JVB_AUTH_USER
|
||||
value: jvb
|
||||
- name: JVB_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JVB_AUTH_PASSWORD
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: JICOFO_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_AUTH_PASSWORD
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
- name: JVB_TCP_HARVESTER_DISABLED
|
||||
value: 'true'
|
||||
- name: web
|
||||
image: domaindrivenarchitecture/c4k-jitsi
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: PUBLIC_URL
|
||||
value: jitsi.test.meissa-gmbh.de
|
||||
- name: XMPP_SERVER
|
||||
value: localhost
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: XMPP_BOSH_URL_BASE
|
||||
value: http://127.0.0.1:5280
|
||||
- name: XMPP_MUC_DOMAIN
|
||||
value: muc.meet.jitsi
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
- name: JVB_TCP_HARVESTER_DISABLED
|
||||
value: 'true'
|
||||
- name: DEFAULT_LANGUAGE
|
||||
value: de
|
||||
- name: RESOLUTION
|
||||
value: '480'
|
||||
- name: RESOLUTION_MIN
|
||||
value: '240'
|
||||
- name: RESOLUTION_WIDTH
|
||||
value: '853'
|
||||
- name: RESOLUTION_WIDTH_MIN
|
||||
value: '427'
|
||||
- name: DISABLE_AUDIO_LEVELS
|
||||
value: 'true'
|
||||
- name: jvb
|
||||
image: jitsi/jvb:stable-6826
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: XMPP_SERVER
|
||||
value: localhost
|
||||
- name: DOCKER_HOST_ADDRESS
|
||||
value: localhost
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: JVB_STUN_SERVERS
|
||||
value: stun.1und1.de:3478,stun.t-online.de:3478,stun.hosteurope.de:3478
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: JVB_TCP_HARVESTER_DISABLED
|
||||
value: 'true'
|
||||
- name: JVB_AUTH_USER
|
||||
value: jvb
|
||||
- name: JVB_PORT
|
||||
value: '30300'
|
||||
- name: JVB_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JVB_AUTH_PASSWORD
|
||||
- name: JICOFO_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_AUTH_PASSWORD
|
||||
- name: JVB_BREWERY_MUC
|
||||
value: jvbbrewery
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
|
@ -1,453 +0,0 @@
|
||||
14:44:10.320 [main] INFO o.d.p.f.c.processors.RemoteProcessor - Connecting to jitsi.test.meissa-gmbh.de/49.12.243.171 with user: root with ssh-key
|
||||
14:44:10.717 [main] INFO c.h.s.u.k.OpenSSHKeyV1KeyFile - Read key type: ssh-rsa
|
||||
14:44:10.815 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:12.201 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "timeout 1 sudo id"
|
||||
14:44:12.310 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: uid=0(root) gid=0(root) groups=0(root)
|
||||
, [/bin/bash, -c, timeout 1 sudo id]
|
||||
14:44:12.385 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "test -e /etc/netplan/99-loopback.yaml"
|
||||
14:44:12.443 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, test -e /etc/netplan/99-loopback.yaml]
|
||||
14:44:12.460 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/netplan/99-loopback.yaml"
|
||||
14:44:12.532 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/netplan/99-loopback.yaml]
|
||||
14:44:12.534 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:12.607 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "netplan apply""
|
||||
14:44:13.851 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "netplan apply"]
|
||||
14:44:13.852 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "test -e /etc/rancher/k3s/config.yaml"
|
||||
14:44:13.911 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, test -e /etc/rancher/k3s/config.yaml]
|
||||
14:44:13.913 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /etc/kubernetes/""
|
||||
14:44:13.981 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /etc/kubernetes/"]
|
||||
14:44:13.983 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /etc/kubernetes/""
|
||||
14:44:14.055 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /etc/kubernetes/"]
|
||||
14:44:14.056 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /var/lib/rancher/k3s/server/manifests/""
|
||||
14:44:14.127 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /var/lib/rancher/k3s/server/manifests/"]
|
||||
14:44:14.128 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /var/lib/rancher/k3s/server/manifests/""
|
||||
14:44:14.197 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /var/lib/rancher/k3s/server/manifests/"]
|
||||
14:44:14.198 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /etc/rancher/k3s/manifests/""
|
||||
14:44:14.267 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /etc/rancher/k3s/manifests/"]
|
||||
14:44:14.267 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /etc/rancher/k3s/manifests/""
|
||||
14:44:14.337 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /etc/rancher/k3s/manifests/"]
|
||||
14:44:14.338 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /var/pvc1""
|
||||
14:44:14.400 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /var/pvc1"]
|
||||
14:44:14.401 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc1""
|
||||
14:44:14.470 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc1"]
|
||||
14:44:14.470 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && test -d /var/pvc2""
|
||||
14:44:14.537 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && test -d /var/pvc2"]
|
||||
14:44:14.537 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc2""
|
||||
14:44:14.608 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "cd ~/ && mkdir -p /var/pvc2"]
|
||||
14:44:14.612 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/config.yaml"
|
||||
14:44:14.684 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/config.yaml]
|
||||
14:44:14.684 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:14.757 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 755 /dev/null /usr/local/bin/k3s-install.sh"
|
||||
14:44:14.827 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 755 /dev/null /usr/local/bin/k3s-install.sh]
|
||||
14:44:14.827 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:14.936 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "INSTALL_K3S_CHANNEL=latest k3s-install.sh"
|
||||
14:44:47.826 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: [INFO] Finding release for channel latest
|
||||
[INFO] Using v1.23.5+k3s1 as release
|
||||
[INFO] Downloading hash https://github.com/k3s-io/k3s/releases/download/v1.23.5+k3s1/sha256sum-amd64.txt
|
||||
[INFO] Downloading binary https://github.com/k3s-io/k3s/releases/download/v1.23.5+k3s1/k3s
|
||||
[INFO] Verifying binary download
|
||||
[INFO] Installing k3s to /usr/local/bin/k3s
|
||||
[INFO] Skipping installation of SELinux RPM
|
||||
[INFO] Creating /usr/local/bin/kubectl symlink to k3s
|
||||
[INFO] Creating /usr/local/bin/crictl symlink to k3s
|
||||
[INFO] Creating /usr/local/bin/ctr symlink to k3s
|
||||
[INFO] Creating killall script /usr/local/bin/k3s-killall.sh
|
||||
[INFO] Creating uninstall script /usr/local/bin/k3s-uninstall.sh
|
||||
[INFO] env: Creating environment file /etc/systemd/system/k3s.service.env
|
||||
[INFO] systemd: Creating service file /etc/systemd/system/k3s.service
|
||||
[INFO] systemd: Enabling k3s unit
|
||||
[INFO] systemd: Starting k3s
|
||||
, Err: Created symlink /etc/systemd/system/multi-user.target.wants/k3s.service → /etc/systemd/system/k3s.service.
|
||||
[/bin/bash, -c, INSTALL_K3S_CHANNEL=latest k3s-install.sh]
|
||||
14:44:47.829 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-namespace.yaml"
|
||||
14:44:47.889 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-namespace.yaml]
|
||||
14:44:47.889 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:47.955 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-namespace.yaml""
|
||||
14:44:49.360 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: namespace/metallb-system created
|
||||
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-namespace.yaml"]
|
||||
14:44:49.363 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml"
|
||||
14:44:49.426 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml]
|
||||
14:44:49.426 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:49.495 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml""
|
||||
14:44:50.193 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: podsecuritypolicy.policy/controller created
|
||||
podsecuritypolicy.policy/speaker created
|
||||
serviceaccount/controller created
|
||||
serviceaccount/speaker created
|
||||
clusterrole.rbac.authorization.k8s.io/metallb-system:controller created
|
||||
clusterrole.rbac.authorization.k8s.io/metallb-system:speaker created
|
||||
role.rbac.authorization.k8s.io/config-watcher created
|
||||
role.rbac.authorization.k8s.io/pod-lister created
|
||||
role.rbac.authorization.k8s.io/controller created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/metallb-system:controller created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/metallb-system:speaker created
|
||||
rolebinding.rbac.authorization.k8s.io/config-watcher created
|
||||
rolebinding.rbac.authorization.k8s.io/pod-lister created
|
||||
rolebinding.rbac.authorization.k8s.io/controller created
|
||||
daemonset.apps/speaker created
|
||||
deployment.apps/controller created
|
||||
, Err: Warning: policy/v1beta1 PodSecurityPolicy is deprecated in v1.21+, unavailable in v1.25+
|
||||
[/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-0.10.2-manifest.yaml"]
|
||||
14:44:50.196 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-config.yaml"
|
||||
14:44:50.259 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/metallb-config.yaml]
|
||||
14:44:50.259 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:50.331 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-config.yaml""
|
||||
14:44:50.897 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: configmap/config created
|
||||
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/metallb-config.yaml"]
|
||||
14:44:50.900 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/traefik.yaml"
|
||||
14:44:50.967 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/traefik.yaml]
|
||||
14:44:50.968 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:51.038 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/traefik.yaml""
|
||||
14:44:51.614 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: helmchart.helm.cattle.io/traefik-crd created
|
||||
helmchart.helm.cattle.io/traefik created
|
||||
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/traefik.yaml"]
|
||||
14:44:51.615 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml"
|
||||
14:44:51.674 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml]
|
||||
14:44:51.675 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:51.739 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml""
|
||||
14:44:52.331 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: configmap/local-path-config configured
|
||||
, Err: Warning: resource configmaps/local-path-config is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
|
||||
[/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/local-path-provisioner-config.yaml"]
|
||||
14:44:52.332 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "kubectl set env deployment -n kube-system local-path-provisioner DEPLOY_DATE="$(date)""
|
||||
14:44:52.844 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: deployment.apps/local-path-provisioner env updated
|
||||
, [/bin/bash, -c, kubectl set env deployment -n kube-system local-path-provisioner DEPLOY_DATE="$(date)"]
|
||||
14:44:52.845 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "ln -sf /etc/rancher/k3s/k3s.yaml /etc/kubernetes/admin.conf""
|
||||
14:44:52.910 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/bash -c "ln -sf /etc/rancher/k3s/k3s.yaml /etc/kubernetes/admin.conf"]
|
||||
14:44:52.933 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/cert-manager.yaml"
|
||||
14:44:53.000 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/cert-manager.yaml]
|
||||
14:44:53.002 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.137 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.233 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.327 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.414 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.522 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.615 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.709 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.803 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:53.903 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.000 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.093 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.194 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.294 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.390 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.485 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.579 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.676 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.773 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.870 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:54.965 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.058 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.144 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.241 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.341 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.451 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.555 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.656 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.751 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:44:55.844 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/cert-manager.yaml""
|
||||
14:45:00.063 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io created
|
||||
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io created
|
||||
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io created
|
||||
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io created
|
||||
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io created
|
||||
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io created
|
||||
namespace/cert-manager created
|
||||
serviceaccount/cert-manager-cainjector created
|
||||
serviceaccount/cert-manager created
|
||||
serviceaccount/cert-manager-webhook created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-view created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-edit created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
|
||||
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests created
|
||||
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews created
|
||||
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
|
||||
role.rbac.authorization.k8s.io/cert-manager:leaderelection created
|
||||
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
|
||||
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection created
|
||||
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection created
|
||||
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
|
||||
service/cert-manager created
|
||||
service/cert-manager-webhook created
|
||||
deployment.apps/cert-manager-cainjector created
|
||||
deployment.apps/cert-manager created
|
||||
deployment.apps/cert-manager-webhook created
|
||||
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
|
||||
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook created
|
||||
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/cert-manager.yaml"]
|
||||
14:45:00.066 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/le-issuer.yaml"
|
||||
14:45:00.128 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/le-issuer.yaml]
|
||||
14:45:00.128 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:45:00.188 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml""
|
||||
14:45:02.889 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: FAILED -- Code: 1, Err: Error from server (InternalError): error when creating "/etc/rancher/k3s/manifests/le-issuer.yaml": Internal error occurred: failed calling webhook "webhook.cert-manager.io": failed to call webhook: Post "https://cert-manager-webhook.cert-manager.svc:443/mutate?timeout=10s": dial tcp 10.43.225.199:443: connect: connection refused
|
||||
[/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml"]
|
||||
14:45:12.914 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml""
|
||||
14:45:21.515 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: clusterissuer.cert-manager.io/staging created
|
||||
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/le-issuer.yaml"]
|
||||
14:45:21.519 [main] INFO o.d.p.f.c.processors.LocalProcessor - os.name: Linux
|
||||
14:45:21.519 [main] INFO o.d.p.f.c.processors.LocalProcessor - user.home: /home/erik
|
||||
14:45:21.519 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:45:21.530 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:45:21.542 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "wc -c < /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml"
|
||||
14:45:21.586 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: 5794
|
||||
, [/bin/bash, -c, wc -c < /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml]
|
||||
14:45:21.587 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cat /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml"
|
||||
14:45:21.592 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: kind: Ingress
|
||||
apiVersion: networking.k8s.io/v1
|
||||
metadata:
|
||||
name: jitsi
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt-staging-issuer
|
||||
ingress.kubernetes.io/ssl-redirect: 'true'
|
||||
kubernetes.io/ingress.class: ''
|
||||
spec:
|
||||
tls:
|
||||
- hosts:
|
||||
- fqdn
|
||||
secretName: tls-jitsi
|
||||
rules:
|
||||
- host: jitsi.test.meissa-gmbh.de
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: web
|
||||
port:
|
||||
number: 80
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: jitsi-config
|
||||
type: Opaque
|
||||
data:
|
||||
JVB_AUTH_PASSWORD: SnZiQXV0aA==
|
||||
JICOFO_AUTH_PASSWORD: Smljb2ZvQXV0aA==
|
||||
JICOFO_COMPONENT_SECRET: Smljb2ZvQ29tcFNlYw==
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
service: jvb
|
||||
name: jvb-udp
|
||||
spec:
|
||||
type: NodePort
|
||||
externalTrafficPolicy: Cluster
|
||||
ports:
|
||||
- port: 30300
|
||||
protocol: UDP
|
||||
targetPort: 30300
|
||||
nodePort: 30300
|
||||
selector:
|
||||
app: jitsi
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
service: web
|
||||
name: web
|
||||
spec:
|
||||
ports:
|
||||
- name: http
|
||||
port: 80
|
||||
targetPort: 80
|
||||
- name: https
|
||||
port: 443
|
||||
targetPort: 443
|
||||
selector:
|
||||
app: jitsi
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app: jitsi
|
||||
name: jitsi
|
||||
spec:
|
||||
strategy:
|
||||
type: Recreate
|
||||
selector:
|
||||
matchLabels:
|
||||
app: jitsi
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: jitsi
|
||||
spec:
|
||||
containers:
|
||||
- name: jicofo
|
||||
image: jitsi/jicofo:stable-6826
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: XMPP_SERVER
|
||||
value: localhost
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_MUC_DOMAIN
|
||||
value: muc.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: JICOFO_COMPONENT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_COMPONENT_SECRET
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: JICOFO_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_AUTH_PASSWORD
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
- name: JVB_BREWERY_MUC
|
||||
value: jvbbrewery
|
||||
- name: prosody
|
||||
image: jitsi/prosody:stable-6826
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: PUBLIC_URL
|
||||
value: jitsi.test.meissa-gmbh.de
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_MUC_DOMAIN
|
||||
value: muc.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: JICOFO_COMPONENT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_COMPONENT_SECRET
|
||||
- name: JVB_AUTH_USER
|
||||
value: jvb
|
||||
- name: JVB_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JVB_AUTH_PASSWORD
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: JICOFO_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_AUTH_PASSWORD
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
- name: JVB_TCP_HARVESTER_DISABLED
|
||||
value: 'true'
|
||||
- name: web
|
||||
image: domaindrivenarchitecture/c4k-jitsi
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: PUBLIC_URL
|
||||
value: jitsi.test.meissa-gmbh.de
|
||||
- name: XMPP_SERVER
|
||||
value: localhost
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: XMPP_BOSH_URL_BASE
|
||||
value: http://127.0.0.1:5280
|
||||
- name: XMPP_MUC_DOMAIN
|
||||
value: muc.meet.jitsi
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
- name: JVB_TCP_HARVESTER_DISABLED
|
||||
value: 'true'
|
||||
- name: DEFAULT_LANGUAGE
|
||||
value: de
|
||||
- name: RESOLUTION
|
||||
value: '480'
|
||||
- name: RESOLUTION_MIN
|
||||
value: '240'
|
||||
- name: RESOLUTION_WIDTH
|
||||
value: '853'
|
||||
- name: RESOLUTION_WIDTH_MIN
|
||||
value: '427'
|
||||
- name: DISABLE_AUDIO_LEVELS
|
||||
value: 'true'
|
||||
- name: jvb
|
||||
image: jitsi/jvb:stable-6826
|
||||
imagePullPolicy: IfNotPresent
|
||||
env:
|
||||
- name: XMPP_SERVER
|
||||
value: localhost
|
||||
- name: DOCKER_HOST_ADDRESS
|
||||
value: localhost
|
||||
- name: XMPP_DOMAIN
|
||||
value: meet.jitsi
|
||||
- name: XMPP_AUTH_DOMAIN
|
||||
value: auth.meet.jitsi
|
||||
- name: XMPP_INTERNAL_MUC_DOMAIN
|
||||
value: internal-muc.meet.jitsi
|
||||
- name: JVB_STUN_SERVERS
|
||||
value: stun.1und1.de:3478,stun.t-online.de:3478,stun.hosteurope.de:3478
|
||||
- name: JICOFO_AUTH_USER
|
||||
value: focus
|
||||
- name: JVB_TCP_HARVESTER_DISABLED
|
||||
value: 'true'
|
||||
- name: JVB_AUTH_USER
|
||||
value: jvb
|
||||
- name: JVB_PORT
|
||||
value: '30300'
|
||||
- name: JVB_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JVB_AUTH_PASSWORD
|
||||
- name: JICOFO_AUTH_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: jitsi-config
|
||||
key: JICOFO_AUTH_PASSWORD
|
||||
- name: JVB_BREWERY_MUC
|
||||
value: jvbbrewery
|
||||
- name: TZ
|
||||
value: Europe/Berlin
|
||||
|
||||
, [/bin/bash, -c, cat /home/erik/repo/c4k/c4k-jitsi/jitsi.yaml]
|
||||
14:45:21.593 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/application.yaml"
|
||||
14:45:21.655 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo install -m 644 /dev/null /etc/rancher/k3s/manifests/application.yaml]
|
||||
14:45:21.656 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "xxxxxxxx"
|
||||
14:45:21.739 [main] INFO o.d.p.f.c.processors.RemoteProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/application.yaml""
|
||||
14:45:23.388 [main] INFO o.d.p.f.c.processors.RemoteProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: ingress.networking.k8s.io/jitsi created
|
||||
secret/jitsi-config created
|
||||
service/jvb-udp created
|
||||
service/web created
|
||||
deployment.apps/jitsi created
|
||||
, [/bin/bash, -c, sudo /bin/bash -c "kubectl apply -f /etc/rancher/k3s/manifests/application.yaml"]
|
||||
14:45:23.389 [main] INFO o.d.p.f.c.processors.RemoteProcessor - Disconnecting ssh.
|
@ -1,146 +0,0 @@
|
||||
13:56:28.246 [main] INFO o.d.p.f.c.processors.LocalProcessor - os.name: Linux
|
||||
13:56:28.249 [main] INFO o.d.p.f.c.processors.LocalProcessor - user.home: /home/erik
|
||||
13:56:28.251 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "xxxxxxxx"
|
||||
13:56:28.270 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "xxxxxxxx"
|
||||
13:56:28.294 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-jira/.git/"
|
||||
13:56:28.323 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-jira/.git/]
|
||||
13:56:28.325 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-jira && git pull"
|
||||
13:56:29.917 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-jira && git pull]
|
||||
13:56:29.917 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-jitsi/.git/"
|
||||
13:56:29.919 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-jitsi/.git/]
|
||||
13:56:29.919 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-jitsi && git pull"
|
||||
13:56:31.398 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-jitsi && git pull]
|
||||
13:56:31.399 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-keycloak/.git/"
|
||||
13:56:31.402 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-keycloak/.git/]
|
||||
13:56:31.402 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-keycloak && git pull"
|
||||
13:56:32.832 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-keycloak && git pull]
|
||||
13:56:32.833 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-mastodon-bot/.git/"
|
||||
13:56:32.835 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-mastodon-bot/.git/]
|
||||
13:56:32.836 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-mastodon-bot && git pull"
|
||||
13:56:34.241 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-mastodon-bot && git pull]
|
||||
13:56:34.242 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-nextcloud/.git/"
|
||||
13:56:34.245 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-nextcloud/.git/]
|
||||
13:56:34.246 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-nextcloud && git pull"
|
||||
13:56:35.665 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-nextcloud && git pull]
|
||||
13:56:35.666 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-shynet/.git/"
|
||||
13:56:35.670 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-shynet/.git/]
|
||||
13:56:35.671 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-shynet && git pull"
|
||||
13:56:37.120 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-shynet && git pull]
|
||||
13:56:37.121 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd ~/ && test -d repo/c4k/c4k-common/.git/"
|
||||
13:56:37.125 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, cd ~/ && test -d repo/c4k/c4k-common/.git/]
|
||||
13:56:37.125 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-common && git pull"
|
||||
13:56:38.527 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/c4k/c4k-common && git pull]
|
||||
13:56:38.529 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/provisioning/provs && git pull"
|
||||
13:56:39.985 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/provisioning/provs && git pull]
|
||||
13:56:39.986 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/provisioning/meissa-provs && git pull"
|
||||
13:56:40.292 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Already up to date.
|
||||
, [/bin/bash, -c, cd repo/provisioning/meissa-provs && git pull]
|
||||
13:56:40.293 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-jira/ && lein uberjar"
|
||||
13:56:51.053 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Created /home/erik/repo/c4k/c4k-jira/target/uberjar/c4k-jira-1.1.2-SNAPSHOT.jar
|
||||
Created /home/erik/repo/c4k/c4k-jira/target/uberjar/c4k-jira-standalone.jar
|
||||
, Err: OpenJDK 64-Bit Server VM warning: Options -Xverify:none and -noverify were deprecated in JDK 13 and will likely be removed in a future release.
|
||||
Compiling dda.c4k-jira.backup
|
||||
Compiling dda.c4k-jira.core
|
||||
Compiling dda.c4k-jira.jira
|
||||
Compiling dda.c4k-jira.uberjar
|
||||
[/bin/bash, -c, cd repo/c4k/c4k-jira/ && lein uberjar]
|
||||
13:56:51.053 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/cp -rf repo/c4k/c4k-jira/target/uberjar/c4k-jira-standalone.jar /usr/local/bin/c4k-jira-standalone.jar"
|
||||
13:56:51.121 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/cp -rf repo/c4k/c4k-jira/target/uberjar/c4k-jira-standalone.jar /usr/local/bin/c4k-jira-standalone.jar]
|
||||
13:56:51.122 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo chmod +x /usr/local/bin/c4k-jira-standalone.jar"
|
||||
13:56:51.132 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo chmod +x /usr/local/bin/c4k-jira-standalone.jar]
|
||||
13:56:51.133 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-jitsi/ && lein uberjar"
|
||||
13:57:01.082 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Created /home/erik/repo/c4k/c4k-jitsi/target/uberjar/c4k-jitsi-1.0.0-SNAPSHOT.jar
|
||||
Created /home/erik/repo/c4k/c4k-jitsi/target/uberjar/c4k-jitsi-standalone.jar
|
||||
, Err: OpenJDK 64-Bit Server VM warning: Options -Xverify:none and -noverify were deprecated in JDK 13 and will likely be removed in a future release.
|
||||
Compiling dda.c4k-jitsi.core
|
||||
Compiling dda.c4k-jitsi.jitsi
|
||||
Compiling dda.c4k-jitsi.uberjar
|
||||
[/bin/bash, -c, cd repo/c4k/c4k-jitsi/ && lein uberjar]
|
||||
13:57:01.082 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/cp -rf repo/c4k/c4k-jitsi/target/uberjar/c4k-jitsi-standalone.jar /usr/local/bin/c4k-jitsi-standalone.jar"
|
||||
13:57:01.180 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/cp -rf repo/c4k/c4k-jitsi/target/uberjar/c4k-jitsi-standalone.jar /usr/local/bin/c4k-jitsi-standalone.jar]
|
||||
13:57:01.181 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo chmod +x /usr/local/bin/c4k-jitsi-standalone.jar"
|
||||
13:57:01.195 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo chmod +x /usr/local/bin/c4k-jitsi-standalone.jar]
|
||||
13:57:01.195 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-keycloak/ && lein uberjar"
|
||||
13:57:10.165 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Created /home/erik/repo/c4k/c4k-keycloak/target/uberjar/c4k-keycloak-0.2.4-SNAPSHOT.jar
|
||||
Created /home/erik/repo/c4k/c4k-keycloak/target/uberjar/c4k-keycloak-standalone.jar
|
||||
, Err: OpenJDK 64-Bit Server VM warning: Options -Xverify:none and -noverify were deprecated in JDK 13 and will likely be removed in a future release.
|
||||
Compiling dda.c4k-keycloak.core
|
||||
Compiling dda.c4k-keycloak.keycloak
|
||||
Compiling dda.c4k-keycloak.postgres
|
||||
Compiling dda.c4k-keycloak.uberjar
|
||||
Compiling dda.c4k-keycloak.yaml
|
||||
[/bin/bash, -c, cd repo/c4k/c4k-keycloak/ && lein uberjar]
|
||||
13:57:10.165 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/cp -rf repo/c4k/c4k-keycloak/target/uberjar/c4k-keycloak-standalone.jar /usr/local/bin/c4k-keycloak-standalone.jar"
|
||||
13:57:10.187 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/cp -rf repo/c4k/c4k-keycloak/target/uberjar/c4k-keycloak-standalone.jar /usr/local/bin/c4k-keycloak-standalone.jar]
|
||||
13:57:10.187 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo chmod +x /usr/local/bin/c4k-keycloak-standalone.jar"
|
||||
13:57:10.193 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo chmod +x /usr/local/bin/c4k-keycloak-standalone.jar]
|
||||
13:57:10.194 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-mastodon-bot/ && lein uberjar"
|
||||
13:57:17.810 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Created /home/erik/repo/c4k/c4k-mastodon-bot/target/uberjar/c4k-mastodon-bot-0.1.11-SNAPSHOT.jar
|
||||
Created /home/erik/repo/c4k/c4k-mastodon-bot/target/uberjar/c4k-mastodon-bot-standalone.jar
|
||||
, Err: OpenJDK 64-Bit Server VM warning: Options -Xverify:none and -noverify were deprecated in JDK 13 and will likely be removed in a future release.
|
||||
Compiling dda.c4k-mastodon-bot.core
|
||||
Compiling dda.c4k-mastodon-bot.uberjar
|
||||
Compiling dda.c4k-mastodon-bot.yaml
|
||||
[/bin/bash, -c, cd repo/c4k/c4k-mastodon-bot/ && lein uberjar]
|
||||
13:57:17.811 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/cp -rf repo/c4k/c4k-mastodon-bot/target/uberjar/c4k-mastodon-bot-standalone.jar /usr/local/bin/c4k-mastodon-bot-standalone.jar"
|
||||
13:57:17.857 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/cp -rf repo/c4k/c4k-mastodon-bot/target/uberjar/c4k-mastodon-bot-standalone.jar /usr/local/bin/c4k-mastodon-bot-standalone.jar]
|
||||
13:57:17.858 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo chmod +x /usr/local/bin/c4k-mastodon-bot-standalone.jar"
|
||||
13:57:17.866 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo chmod +x /usr/local/bin/c4k-mastodon-bot-standalone.jar]
|
||||
13:57:17.867 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-nextcloud/ && lein uberjar"
|
||||
13:57:25.766 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Created /home/erik/repo/c4k/c4k-nextcloud/target/uberjar/c4k-nextcloud-4.0.1-SNAPSHOT.jar
|
||||
Created /home/erik/repo/c4k/c4k-nextcloud/target/uberjar/c4k-nextcloud-standalone.jar
|
||||
, Err: OpenJDK 64-Bit Server VM warning: Options -Xverify:none and -noverify were deprecated in JDK 13 and will likely be removed in a future release.
|
||||
Compiling dda.c4k-nextcloud.backup
|
||||
Compiling dda.c4k-nextcloud.core
|
||||
Compiling dda.c4k-nextcloud.nextcloud
|
||||
Compiling dda.c4k-nextcloud.uberjar
|
||||
[/bin/bash, -c, cd repo/c4k/c4k-nextcloud/ && lein uberjar]
|
||||
13:57:25.766 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/cp -rf repo/c4k/c4k-nextcloud/target/uberjar/c4k-nextcloud-standalone.jar /usr/local/bin/c4k-nextcloud-standalone.jar"
|
||||
13:57:25.803 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/cp -rf repo/c4k/c4k-nextcloud/target/uberjar/c4k-nextcloud-standalone.jar /usr/local/bin/c4k-nextcloud-standalone.jar]
|
||||
13:57:25.803 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo chmod +x /usr/local/bin/c4k-nextcloud-standalone.jar"
|
||||
13:57:25.812 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo chmod +x /usr/local/bin/c4k-nextcloud-standalone.jar]
|
||||
13:57:25.813 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/c4k/c4k-shynet/ && lein uberjar"
|
||||
13:57:37.422 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Created /home/erik/repo/c4k/c4k-shynet/target/uberjar/c4k-shynet-1.0.7-SNAPSHOT.jar
|
||||
Created /home/erik/repo/c4k/c4k-shynet/target/uberjar/c4k-shynet-standalone.jar
|
||||
, Err: OpenJDK 64-Bit Server VM warning: Options -Xverify:none and -noverify were deprecated in JDK 13 and will likely be removed in a future release.
|
||||
Compiling dda.c4k-shynet.core
|
||||
Compiling dda.c4k-shynet.shynet
|
||||
Compiling dda.c4k-shynet.uberjar
|
||||
[/bin/bash, -c, cd repo/c4k/c4k-shynet/ && lein uberjar]
|
||||
13:57:37.423 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo /bin/cp -rf repo/c4k/c4k-shynet/target/uberjar/c4k-shynet-standalone.jar /usr/local/bin/c4k-shynet-standalone.jar"
|
||||
13:57:37.620 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo /bin/cp -rf repo/c4k/c4k-shynet/target/uberjar/c4k-shynet-standalone.jar /usr/local/bin/c4k-shynet-standalone.jar]
|
||||
13:57:37.621 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "sudo chmod +x /usr/local/bin/c4k-shynet-standalone.jar"
|
||||
13:57:37.653 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, [/bin/bash, -c, sudo chmod +x /usr/local/bin/c4k-shynet-standalone.jar]
|
||||
13:57:37.665 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/provisioning/provs && ./gradlew installl"
|
||||
13:57:50.319 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: Starting a Gradle Daemon (subsequent builds will be faster)
|
||||
> Task :compileKotlin UP-TO-DATE
|
||||
> Task :compileJava NO-SOURCE
|
||||
> Task :processResources UP-TO-DATE
|
||||
> Task :classes UP-TO-DATE
|
||||
> Task :uberjarDesktop UP-TO-DATE
|
||||
> Task :uberjarServer UP-TO-DATE
|
||||
> Task :uberjarSyspec UP-TO-DATE
|
||||
> Task :installlocally
|
||||
|
||||
BUILD SUCCESSFUL in 12s
|
||||
6 actionable tasks: 1 executed, 5 up-to-date
|
||||
, [/bin/bash, -c, cd repo/provisioning/provs && ./gradlew installl]
|
||||
13:57:50.319 [main] INFO o.d.p.f.c.processors.LocalProcessor - ******************** Prov: "/bin/bash" "-c" "cd repo/provisioning/meissa-provs && ./gradlew installl"
|
||||
13:57:51.960 [main] INFO o.d.p.f.c.processors.LocalProcessor - --->>> ProcessResult: Succeeded -- Code: 0, Out: > Task :compileKotlin UP-TO-DATE
|
||||
> Task :compileJava NO-SOURCE
|
||||
> Task :processResources UP-TO-DATE
|
||||
> Task :classes UP-TO-DATE
|
||||
> Task :uberjar UP-TO-DATE
|
||||
> Task :installlocally
|
||||
|
||||
BUILD SUCCESSFUL in 1s
|
||||
4 actionable tasks: 1 executed, 3 up-to-date
|
||||
, [/bin/bash, -c, cd repo/provisioning/meissa-provs && ./gradlew installl]
|
@ -0,0 +1,13 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
labels:
|
||||
service: excalidraw-backend
|
||||
name: excalidraw-backend
|
||||
spec:
|
||||
ports:
|
||||
- name: excalidraw-backend
|
||||
port: 3002
|
||||
targetPort: 80
|
||||
selector:
|
||||
app: excalidraw-backend
|
@ -0,0 +1,20 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app: excalidraw-backend
|
||||
name: excalidraw-backend
|
||||
spec:
|
||||
strategy:
|
||||
type: Recreate
|
||||
selector:
|
||||
matchLabels:
|
||||
app: excalidraw-backend
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: excalidraw-backend
|
||||
spec:
|
||||
containers:
|
||||
- name: excalidraw-backend
|
||||
image: domaindrivenarchitecture/c4k-jitsi-excalidraw-backend
|
Loading…
Reference in New Issue