kubs/alaskarTV/alaskarfin/deployment-worker.yaml
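
# Jellyfin worker: a single-replica Deployment of the nomadics/alaskarfin image,
# pinned to nodes labeled role=worker and backed by the shared media PVCs below.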
apiVersion: apps/v1
kind: Deployment
metadata:
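  # Keel image-update policy: poll the registry every 6 hours and allow
  # automatic minor/patch version bumps of the container image.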
  annotations:
    keel.sh/policy: minor
    keel.sh/trigger: poll
    keel.sh/pollSchedule: "@every 6h"
  labels:
    io.kompose.service: jellyfin-worker
  name: jellyfin-worker
spec:
  replicas: 1
  selector:
    matchLabels:
      io.kompose.service: jellyfin-worker
  strategy:
    type: Recreate
  template:
    metadata:
      labels:
        io.kompose.service: jellyfin-worker
    spec:
      nodeSelector:
        role: worker
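      # supplementalGroups 105 is presumably the host's render/video group,
      # granting the container access to /dev/dri/renderD128 mounted below.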
      securityContext:
        supplementalGroups:
          - 105
        runAsUser: 1000
        runAsGroup: 1000
      containers:
        - env:
            - name: PGID
              value: "1000"
            - name: PUID
              value: "1000"
            - name: TZ
              value: Asia/Kuwait
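          # Privileged mode; likely needed for direct access to the GPU render
          # device (/dev/dri/renderD128) for hardware-accelerated transcoding.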
          securityContext:
            privileged: true
          image: nomadics/alaskarfin:latest
          name: jellyfin
          ports:
            - containerPort: 8096
              protocol: TCP
          volumeMounts:
            - mountPath: /config
              name: ssd
              subPath: configs/jellyarr/jellyfin_worker_config
            - mountPath: /data/tvshows
              name: ssd
              subPath: tvshows
            - mountPath: /data/movies
              name: ssd
              subPath: movies
            - mountPath: /data/anime
              name: ssd
              subPath: anime
            - mountPath: /data/books/audiobooks
              name: ssd
              subPath: books/audiobooks
            - mountPath: /data/books/ebooks
              name: ssd
              subPath: books/ebooks
            - mountPath: /HDD
              name: hdd
            - mountPath: /mnt/zurg/__all__
              name: rclone-data
              subPath: __all__
            - name: render-d128
              mountPath: /dev/dri/renderD128
      restartPolicy: Always
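      # Storage: ssd/hdd are NFS-backed PVCs (per the claim names), rclone-data
      # exposes the rclone/zurg mount, and render-d128 is a hostPath to the
      # node's GPU render device.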
      volumes:
        - name: ssd
          persistentVolumeClaim:
            claimName: nfs-ssd-pvc
        - name: hdd
          persistentVolumeClaim:
            claimName: nfs-hdd-pvc
        - name: rclone-data
          persistentVolumeClaim:
            claimName: rclone-data-pvc
        - name: render-d128
          hostPath:
            path: /dev/dri/renderD128