Auto-sync dyno.yml 2026-01-27 01:57

Maddox 2026-01-27 01:57:38 +00:00
parent bb27e55004
commit 66ceea1359
9 changed files with 1812 additions and 25 deletions

@@ -1267,7 +1267,7 @@ http:
     channeltube:
       loadBalancer:
         servers:
-          - url: http://192.168.1.252:5444
+          - url: http://192.168.1.80:5444
         passHostHeader: false
     cyberchef:
       loadBalancer:
@@ -1283,7 +1283,7 @@ http:
     requests:
       loadBalancer:
         servers:
-          - url: http://192.168.1.252:5055
+          - url: http://192.168.1.80:5055
         passHostHeader: false
     lidarr:
       loadBalancer:
@@ -1367,12 +1367,12 @@ http:
     autoscan:
       loadBalancer:
         servers:
-          - url: http://192.168.1.252:3030
+          - url: http://192.168.1.80:3030
         passHostHeader: false
     watchstate:
       loadBalancer:
         servers:
-          - url: http://192.168.1.252:8585
+          - url: http://192.168.1.80:8585
         passHostHeader: false
     node-red-het:
       loadBalancer:
@@ -1447,7 +1447,7 @@ http:
     dispatcharr:
       loadBalancer:
         servers:
-          - url: http://192.168.1.252:9191
+          - url: http://192.168.1.80:9191
     technitium:
       loadBalancer:
         servers:

migration/ansible_controller.sh (new executable file, 405 lines)

@@ -0,0 +1,405 @@
#!/bin/bash
# Matrix Ansible Controller - Migration Script
# Target: replicant (.80) - change if desired
# Run this on the control server (CT 127)
set -e
SERVICE_NAME="matrix-ansible-controller"
TARGET_HOST="replicant"
COMPOSE_DIR=~/clustered-fucks/compose-files/${TARGET_HOST}/${SERVICE_NAME}
PLAYBOOK_DIR=~/clustered-fucks/playbooks
echo "=== Matrix Ansible Controller Setup Script ==="
echo "Service: ${SERVICE_NAME}"
echo "Target: ${TARGET_HOST}"
echo ""
# Create directories
mkdir -p "$COMPOSE_DIR"
mkdir -p "$PLAYBOOK_DIR"
# =============================================================================
# Dockerfile
# =============================================================================
cat > "$COMPOSE_DIR/Dockerfile" << 'EOF'
# Matrix Ansible Controller
# Portable container for managing matrix-docker-ansible-deploy playbook
FROM python:3.12-alpine
LABEL maintainer="maddox"
LABEL description="Portable Ansible controller for matrix-docker-ansible-deploy"
# Install system dependencies
RUN apk add --no-cache \
# Core tools
git \
openssh-client \
bash \
curl \
rsync \
# Build dependencies for Python packages
gcc \
musl-dev \
libffi-dev \
openssl-dev \
python3-dev \
# For just command runner
just \
# Useful utilities
nano \
vim \
tmux \
jq
# Install Ansible and required Python packages
RUN pip install --no-cache-dir \
"ansible>=2.17.0" \
passlib \
dnspython \
netaddr \
jmespath \
docker \
requests
# Install agru (faster ansible-galaxy alternative used by the playbook)
RUN pip install --no-cache-dir agru
# Create working directories
RUN mkdir -p /playbook /inventory /ssh
# Set up SSH directory with proper permissions
RUN mkdir -p /root/.ssh && chmod 700 /root/.ssh
# Copy entrypoint script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# Set working directory to playbook
WORKDIR /playbook
# Default environment
ENV ANSIBLE_HOST_KEY_CHECKING=False
ENV ANSIBLE_FORCE_COLOR=True
ENV TERM=xterm-256color
ENTRYPOINT ["/entrypoint.sh"]
CMD ["/bin/bash"]
EOF
echo "✅ Created $COMPOSE_DIR/Dockerfile"
# =============================================================================
# Entrypoint Script
# =============================================================================
cat > "$COMPOSE_DIR/entrypoint.sh" << 'EOF'
#!/bin/bash
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE} Matrix Ansible Controller${NC}"
echo -e "${BLUE}========================================${NC}"
# --- SSH Key Setup ---
if [ -d "/ssh" ] && [ "$(ls -A /ssh 2>/dev/null)" ]; then
echo -e "${GREEN}[SSH]${NC} Setting up SSH keys from /ssh mount..."
cp -r /ssh/* /root/.ssh/ 2>/dev/null || true
chmod 700 /root/.ssh
chmod 600 /root/.ssh/* 2>/dev/null || true
chmod 644 /root/.ssh/*.pub 2>/dev/null || true
chmod 644 /root/.ssh/known_hosts 2>/dev/null || true
chmod 644 /root/.ssh/config 2>/dev/null || true
echo -e "${GREEN}[SSH]${NC} Keys configured"
else
echo -e "${YELLOW}[SSH]${NC} No SSH keys mounted at /ssh"
echo -e "${YELLOW}[SSH]${NC} Mount with: -v ~/.ssh:/ssh:ro"
fi
# --- Playbook Setup ---
if [ ! -f "/playbook/setup.yml" ]; then
echo -e "${GREEN}[PLAYBOOK]${NC} Cloning matrix-docker-ansible-deploy..."
git clone https://github.com/spantaleev/matrix-docker-ansible-deploy.git /tmp/playbook
mv /tmp/playbook/* /playbook/
mv /tmp/playbook/.* /playbook/ 2>/dev/null || true
rm -rf /tmp/playbook
echo -e "${GREEN}[PLAYBOOK]${NC} Playbook cloned successfully"
else
echo -e "${GREEN}[PLAYBOOK]${NC} Playbook already present"
fi
# --- Inventory Setup ---
if [ -d "/inventory" ] && [ "$(ls -A /inventory 2>/dev/null)" ]; then
echo -e "${GREEN}[INVENTORY]${NC} Linking inventory from /inventory mount..."
# Remove default inventory if it exists
rm -rf /playbook/inventory 2>/dev/null || true
# Create symlink to mounted inventory
ln -sf /inventory /playbook/inventory
echo -e "${GREEN}[INVENTORY]${NC} Inventory linked: /playbook/inventory -> /inventory"
else
echo -e "${YELLOW}[INVENTORY]${NC} No inventory mounted at /inventory"
echo -e "${YELLOW}[INVENTORY]${NC} Mount with: -v /path/to/inventory:/inventory"
# Ensure default inventory directory exists
mkdir -p /playbook/inventory
fi
# --- Install/Update Ansible Roles ---
if [ -f "/playbook/requirements.yml" ]; then
if [ ! -d "/playbook/roles/galaxy" ] || [ "${UPDATE_ROLES:-false}" = "true" ]; then
echo -e "${GREEN}[ROLES]${NC} Installing Ansible Galaxy roles..."
cd /playbook
if command -v agru &> /dev/null; then
# Use agru if available (faster)
agru
else
rm -rf roles/galaxy
ansible-galaxy install -r requirements.yml -p roles/galaxy/ --force
fi
echo -e "${GREEN}[ROLES]${NC} Roles installed successfully"
else
echo -e "${GREEN}[ROLES]${NC} Roles already installed (set UPDATE_ROLES=true to refresh)"
fi
fi
# --- Display Status ---
echo ""
echo -e "${BLUE}----------------------------------------${NC}"
echo -e "${GREEN}Status:${NC}"
echo -e " Ansible: $(ansible --version | head -1)"
echo -e " Playbook: /playbook"
echo -e " Inventory: /playbook/inventory"
echo ""
echo -e "${BLUE}Quick Commands:${NC}"
echo -e " just install-all # Full installation"
echo -e " just setup-all # Setup all components"
echo -e " just roles # Update roles"
echo -e " just update # git pull + update roles"
echo ""
echo -e " ansible-playbook -i inventory/hosts setup.yml --tags=install-all,start"
echo ""
echo -e "${BLUE}----------------------------------------${NC}"
echo ""
# Execute command or start interactive shell
exec "$@"
EOF
echo "✅ Created $COMPOSE_DIR/entrypoint.sh"
# =============================================================================
# Docker Compose
# =============================================================================
cat > "$COMPOSE_DIR/docker-compose.yml" << 'EOF'
# Matrix Ansible Controller
# Portable container for managing matrix-docker-ansible-deploy playbook
#
# Usage:
# docker compose up -d
# docker exec -it matrix-ansible-controller bash
# just install-all
services:
controller:
build: .
image: matrix-ansible-controller:latest
container_name: matrix-ansible-controller
hostname: matrix-controller
# Keep container running for interactive use
stdin_open: true
tty: true
volumes:
# SSH keys (read-only) - for connecting to matrix server
- /root/.ssh:/ssh:ro
# Persistent playbook directory (survives container rebuilds)
- ./data/playbook:/playbook
# Your inventory configuration (vars.yml, hosts, etc.)
- ./data/inventory:/inventory
# Persist ansible cache/facts
- ./data/ansible-cache:/root/.ansible
environment:
- ANSIBLE_HOST_KEY_CHECKING=False
- ANSIBLE_FORCE_COLOR=True
- UPDATE_ROLES=false
network_mode: bridge
deploy:
resources:
limits:
memory: 1G
cpus: '2.0'
labels:
- "com.centurylinklabs.watchtower.enable=false"
restart: unless-stopped
EOF
echo "✅ Created $COMPOSE_DIR/docker-compose.yml"
# =============================================================================
# Ansible Deployment Playbook
# =============================================================================
cat > "$PLAYBOOK_DIR/deploy-matrix-ansible-controller.yml" << 'EOF'
---
# Deploy Matrix Ansible Controller
#
# Usage:
# ansible-playbook playbooks/deploy-matrix-ansible-controller.yml
#
# After deployment:
# ssh replicant
# docker exec -it matrix-ansible-controller bash
# # Copy your vars.yml to data/inventory/host_vars/matrix.yourdomain.com/
- name: Deploy Matrix Ansible Controller
hosts: replicant
vars:
service_name: matrix-ansible-controller
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory structure
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: '0755'
loop:
- "{{ service_dir }}"
- "{{ service_dir }}/data"
- "{{ service_dir }}/data/playbook"
- "{{ service_dir }}/data/inventory"
- "{{ service_dir }}/data/inventory/host_vars"
- "{{ service_dir }}/data/ansible-cache"
- name: Copy Dockerfile
ansible.builtin.copy:
src: "{{ compose_src }}/Dockerfile"
dest: "{{ service_dir }}/Dockerfile"
mode: '0644'
- name: Copy entrypoint script
ansible.builtin.copy:
src: "{{ compose_src }}/entrypoint.sh"
dest: "{{ service_dir }}/entrypoint.sh"
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Build Docker image
community.docker.docker_image:
name: matrix-ansible-controller
tag: latest
source: build
build:
path: "{{ service_dir }}"
pull: yes
state: present
force_source: yes
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
- name: Display next steps
ansible.builtin.debug:
msg: |
✅ Matrix Ansible Controller deployed!
=== NEXT STEPS ===
1. Copy your Matrix inventory to the container:
ssh replicant
cd /home/maddox/docker/appdata/matrix-ansible-controller/data/inventory
# Create the structure:
mkdir -p host_vars/matrix.yourdomain.com
# Copy your vars.yml (from wherever it currently lives):
# Option A: From another machine via scp
# Option B: Create/paste manually
# Also create/copy the hosts file:
cat > hosts << 'HOSTS'
[matrix_servers]
matrix.yourdomain.com ansible_host=YOUR_MATRIX_IP ansible_ssh_user=root
HOSTS
2. Enter the container and test:
docker exec -it matrix-ansible-controller bash
ansible -i inventory/hosts all -m ping
3. Run Matrix updates:
just update # Update playbook + roles
just install-all # Deploy changes
===================================
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-matrix-ansible-controller.yml"
# =============================================================================
# Summary
# =============================================================================
echo ""
echo "==========================================="
echo " FILES CREATED"
echo "==========================================="
echo ""
echo "Compose files:"
echo " $COMPOSE_DIR/Dockerfile"
echo " $COMPOSE_DIR/entrypoint.sh"
echo " $COMPOSE_DIR/docker-compose.yml"
echo ""
echo "Playbook:"
echo " $PLAYBOOK_DIR/deploy-matrix-ansible-controller.yml"
echo ""
echo "==========================================="
echo " NEXT STEPS"
echo "==========================================="
echo ""
echo "1. DEPLOY:"
echo " ansible-playbook playbooks/deploy-matrix-ansible-controller.yml"
echo ""
echo "2. VERIFY:"
echo " ssh replicant 'docker ps | grep matrix-ansible'"
echo ""
echo "3. COPY YOUR MATRIX INVENTORY:"
echo " ssh replicant"
echo " cd /home/maddox/docker/appdata/matrix-ansible-controller/data/inventory"
echo " mkdir -p host_vars/matrix.yourdomain.com"
echo " # Then copy/create your vars.yml and hosts file"
echo ""
echo "4. TEST:"
echo " docker exec -it matrix-ansible-controller bash"
echo " ansible -i inventory/hosts all -m ping"
echo " just install-all"
echo ""
echo "5. COMMIT TO GIT:"
echo " git add -A && git commit -m 'Add matrix-ansible-controller' && git push"
echo ""
echo "==========================================="

migration/migrate-homepage.sh (new executable file, 150 lines)

@@ -0,0 +1,150 @@
#!/bin/bash
# Homepage Migration Script
# Target: replicant (.80)
# Run this on the control server (CT 127)
set -e
COMPOSE_DIR=~/clustered-fucks/compose-files/replicant/homepage
PLAYBOOK_DIR=~/clustered-fucks/playbooks
echo "=== Homepage Migration Script ==="
echo "Target: replicant (.80)"
echo ""
# Create directories
mkdir -p "$COMPOSE_DIR"
mkdir -p "$PLAYBOOK_DIR"
# Create docker-compose.yml
cat > "$COMPOSE_DIR/docker-compose.yml" << 'EOF'
services:
homepage:
image: ghcr.io/gethomepage/homepage:latest
container_name: homepage
environment:
- PUID=1000
- PGID=1000
- HOMEPAGE_ALLOWED_HOSTS=192.168.1.80:3305,home.3ddbrewery.com,*
volumes:
- ./config:/app/config
ports:
- "3305:3000"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 512M
cpus: '1.0'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_DIR/docker-compose.yml"
# Create Ansible playbook
cat > "$PLAYBOOK_DIR/deploy-homepage.yml" << 'EOF'
---
- name: Deploy Homepage to replicant
hosts: replicant
vars:
service_name: homepage
service_dir: "{{ docker_appdata }}/{{ service_name }}"
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Create config directory
ansible.builtin.file:
path: "{{ service_dir }}/config"
state: directory
mode: '0755'
- name: Pull latest image
community.docker.docker_image:
name: ghcr.io/gethomepage/homepage:latest
source: pull
force_source: yes
- name: Deploy Homepage
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: missing
register: deploy_result
- name: Show deployment result
ansible.builtin.debug:
msg: "Homepage deployed: {{ deploy_result.changed }}"
- name: Wait for container to be healthy
ansible.builtin.shell: |
for i in {1..30}; do
status=$(docker inspect --format='{{ "{{" }}.State.Health.Status{{ "}}" }}' homepage 2>/dev/null || echo "starting")
if [ "$status" = "healthy" ]; then
echo "Container is healthy"
exit 0
fi
sleep 2
done
echo "Warning: Container not healthy after 60s"
docker logs homepage 2>&1 | tail -20
register: health_check
changed_when: false
- name: Display health status
ansible.builtin.debug:
msg: "{{ health_check.stdout }}"
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-homepage.yml"
echo ""
echo "=== Files Created ==="
echo " $COMPOSE_DIR/docker-compose.yml"
echo " $PLAYBOOK_DIR/deploy-homepage.yml"
echo ""
echo "=== Next Steps ==="
echo ""
echo "1. RSYNC CONFIG FROM ALIEN (do this first!):"
echo " ssh alien 'docker stop homepage'"
echo " rsync -avP maddox@alien:/home/maddox/docker/appdata/homepage/config/ \\"
echo " maddox@replicant:/home/maddox/docker/appdata/homepage/config/"
echo ""
echo "2. DEPLOY TO REPLICANT:"
echo " cd ~/clustered-fucks"
echo " ansible-playbook playbooks/deploy-homepage.yml"
echo ""
echo "3. VERIFY:"
echo " curl -s http://192.168.1.80:3305/api/healthcheck"
echo " # or visit http://192.168.1.80:3305 in browser"
echo ""
echo "4. UPDATE TRAEFIK (on alien - update IP from .252 to .80):"
echo " # Edit /home/maddox/docker/appdata/traefik/config/config.yml"
echo " # Change homepage URL from 192.168.1.252:3305 to 192.168.1.80:3305"
echo ""
echo "5. CLEANUP ALIEN (after verification):"
echo " ssh alien 'docker rm homepage'"
echo ""
echo "6. COMMIT TO GIT:"
echo " cd ~/clustered-fucks"
echo " git add -A && git commit -m 'Deploy homepage to replicant' && git push"
echo ""

@@ -0,0 +1,195 @@
#!/bin/bash
# Watchstate + Jellyseerr Migration Script
# Target: replicant (.80)
# Run this on the control server (CT 127)
set -e
echo "=== Watchstate + Jellyseerr Migration Script ==="
echo "Target: replicant (192.168.1.80)"
echo ""
# ============================================
# WATCHSTATE
# ============================================
WATCHSTATE_COMPOSE_DIR=~/clustered-fucks/compose-files/replicant/watchstate
PLAYBOOK_DIR=~/clustered-fucks/playbooks
mkdir -p "$WATCHSTATE_COMPOSE_DIR"
mkdir -p "$PLAYBOOK_DIR"
cat > "$WATCHSTATE_COMPOSE_DIR/docker-compose.yml" << 'EOF'
services:
watchstate:
image: ghcr.io/arabcoders/watchstate:latest
container_name: watchstate
user: "1000:1000"
environment:
- TZ=America/Indiana/Indianapolis
volumes:
- ./config:/config:rw
ports:
- "8585:8080"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 512M
cpus: '1.0'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:8080/v1/api/system/healthcheck || exit 1"]
interval: 30s
timeout: 10s
retries: 3
networks:
proxy:
external: true
EOF
echo "✅ Created $WATCHSTATE_COMPOSE_DIR/docker-compose.yml"
cat > "$PLAYBOOK_DIR/deploy-watchstate.yml" << 'EOF'
---
- name: Deploy Watchstate to replicant
hosts: replicant
vars:
service_name: watchstate
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
mode: '0755'
- name: Create config subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/config"
state: directory
owner: "1000"
group: "1000"
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-watchstate.yml"
# ============================================
# JELLYSEERR
# ============================================
JELLYSEERR_COMPOSE_DIR=~/clustered-fucks/compose-files/replicant/jellyseerr
mkdir -p "$JELLYSEERR_COMPOSE_DIR"
cat > "$JELLYSEERR_COMPOSE_DIR/docker-compose.yml" << 'EOF'
services:
jellyseerr:
image: fallenbagel/jellyseerr:latest
container_name: jellyseerr
environment:
- LOG_LEVEL=info
- TZ=America/Indiana/Indianapolis
volumes:
- ./config:/app/config
ports:
- "5055:5055"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 1G
cpus: '1.0'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $JELLYSEERR_COMPOSE_DIR/docker-compose.yml"
cat > "$PLAYBOOK_DIR/deploy-jellyseerr.yml" << 'EOF'
---
- name: Deploy Jellyseerr to replicant
hosts: replicant
vars:
service_name: jellyseerr
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
mode: '0755'
- name: Create config subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/config"
state: directory
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-jellyseerr.yml"
echo ""
echo "=========================================="
echo "=== MIGRATION STEPS ==="
echo "=========================================="
echo ""
echo "--- WATCHSTATE ---"
echo "1. STOP: ssh alien 'docker stop watchstate'"
echo "2. RSYNC: ssh replicant"
echo " rsync -avP maddox@alien:/home/maddox/docker/appdata/watchstate/ /home/maddox/docker/appdata/watchstate/config/"
echo "3. DEPLOY: ansible-playbook playbooks/deploy-watchstate.yml"
echo "4. VERIFY: curl http://192.168.1.80:8585/v1/api/system/healthcheck"
echo ""
echo "--- JELLYSEERR ---"
echo "5. STOP: ssh alien 'docker stop jellyseerr'"
echo "6. RSYNC: ssh replicant"
echo " rsync -avP maddox@alien:/home/maddox/docker/appdata/jellyseerr/ /home/maddox/docker/appdata/jellyseerr/config/"
echo "7. DEPLOY: ansible-playbook playbooks/deploy-jellyseerr.yml"
echo "8. VERIFY: curl -I http://192.168.1.80:5055"
echo ""
echo "--- CLEANUP ---"
echo "9. UPDATE TRAEFIK: Change backend IPs to 192.168.1.80"
echo "10. CLEANUP: ssh alien 'docker rm watchstate jellyseerr'"
echo "11. COMMIT: git add -A && git commit -m 'Migrate watchstate + jellyseerr to replicant' && git push"

migration/migrate-ntfy.sh (new executable file, 141 lines)

@@ -0,0 +1,141 @@
#!/bin/bash
# ntfy Migration Script
# Target: network-services (.121)
# Run this on the control server (CT 127)
set -e
COMPOSE_DIR=~/clustered-fucks/compose-files/network-services/ntfy
PLAYBOOK_DIR=~/clustered-fucks/playbooks
echo "=== ntfy Migration Script ==="
echo "Target: network-services (.121)"
echo "⚠️ CRITICAL: This is your notification hub - expect brief cluster-wide notification outage"
echo ""
# Create directories
mkdir -p "$COMPOSE_DIR"
mkdir -p "$PLAYBOOK_DIR"
# Create docker-compose.yml
cat > "$COMPOSE_DIR/docker-compose.yml" << 'EOF'
services:
ntfy:
image: binwiederhier/ntfy:latest
container_name: ntfy
command: serve
environment:
- NTFY_BASE_URL=https://ntfy.3ddbrewery.com
- NTFY_BEHIND_PROXY=true
volumes:
- ./data:/var/lib/ntfy
- ./cache:/var/cache/ntfy
ports:
- "6741:80"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 256M
cpus: '0.5'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_DIR/docker-compose.yml"
# Create Ansible playbook
cat > "$PLAYBOOK_DIR/deploy-ntfy.yml" << 'EOF'
---
- name: Deploy ntfy to network-services
hosts: network-services
vars:
service_name: ntfy
service_dir: "{{ docker_appdata }}/{{ service_name }}"
compose_src: "{{ playbook_dir }}/../compose-files/network-services/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
mode: '0755'
- name: Create data subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/data"
state: directory
mode: '0755'
- name: Create cache subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/cache"
state: directory
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Pull latest image
community.docker.docker_image:
name: binwiederhier/ntfy:latest
source: pull
force_source: yes
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: missing
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-ntfy.yml"
echo ""
echo "=== FILES CREATED ==="
echo " - $COMPOSE_DIR/docker-compose.yml"
echo " - $PLAYBOOK_DIR/deploy-ntfy.yml"
echo ""
echo "=== MIGRATION STEPS ==="
echo ""
echo "1. STOP OLD CONTAINER:"
echo " ssh alien 'docker stop ntfy'"
echo ""
echo "2. RSYNC DATA (run from network-services):"
echo " ssh network-services"
echo " sudo mkdir -p /home/docker/appdata/ntfy/{data,cache}"
echo " sudo rsync -avP maddox@alien:/home/maddox/docker/appdata/ntfy/ /home/docker/appdata/ntfy/data/"
echo " sudo rsync -avP maddox@alien:/home/maddox/docker/appdata/ntfy/cache/ /home/docker/appdata/ntfy/cache/"
echo " exit"
echo ""
echo "3. DEPLOY:"
echo " ansible-playbook playbooks/deploy-ntfy.yml"
echo ""
echo "4. VERIFY:"
echo " curl -s http://192.168.1.121:6741/v1/health"
echo " # Test notification:"
echo " curl -d 'Migration test' http://192.168.1.121:6741/test-migration"
echo ""
echo "5. UPDATE TRAEFIK (on alien):"
echo " # Change ntfy backend from 192.168.1.252:6741 to 192.168.1.121:6741"
echo ""
echo "6. VERIFY EXTERNAL ACCESS:"
echo " curl -d 'External test' https://ntfy.3ddbrewery.com/test-migration"
echo ""
echo "7. CLEANUP:"
echo " ssh alien 'docker rm ntfy'"
echo ""
echo "8. COMMIT:"
echo " git add -A && git commit -m 'Migrate ntfy to network-services (.121)' && git push"
echo ""
echo "=== DONE ==="

migration/migrate-sb.sh (new executable file, 161 lines)

@@ -0,0 +1,161 @@
#!/bin/bash
# SilverBullet Migration Script
# Target: databases (.81)
# Run this on the control server (CT 127)
set -e
COMPOSE_DIR=~/clustered-fucks/compose-files/databases/silverbullet
PLAYBOOK_DIR=~/clustered-fucks/playbooks
# Create directories
mkdir -p "$COMPOSE_DIR"
mkdir -p "$PLAYBOOK_DIR"
# Create docker-compose.yml
cat > "$COMPOSE_DIR/docker-compose.yml" << 'EOF'
services:
silverbullet:
image: ghcr.io/silverbulletmd/silverbullet
container_name: silverbullet
restart: unless-stopped
env_file:
- .env
volumes:
- ./space:/space
ports:
- "53510:3000"
networks:
- proxy
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
deploy:
resources:
limits:
memory: 512M
cpus: '0.5'
healthcheck:
test: ["CMD-SHELL", "curl --fail http://localhost:3000/.ping || exit 1"]
interval: 30s
timeout: 10s
retries: 3
networks:
proxy:
external: true
EOF
# Create .env file
cat > "$COMPOSE_DIR/.env" << 'EOF'
SB_USER=maddox:./sk8nh8
SB_HOSTNAME=0.0.0.0
SB_FOLDER=/space
EOF
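# The .env above holds the SB_USER credential - keep the local copy locked down too
chmod 600 "$COMPOSE_DIR/.env"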
# Create Ansible playbook
cat > "$PLAYBOOK_DIR/deploy-silverbullet.yml" << 'EOF'
---
- name: Deploy SilverBullet to databases
hosts: databases
become: yes
vars:
appdata_path: /home/docker/appdata/silverbullet
compose_src: ~/clustered-fucks/compose-files/databases/silverbullet
tasks:
- name: Create appdata directory
file:
path: "{{ appdata_path }}"
state: directory
mode: '0755'
- name: Copy docker-compose.yml
copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ appdata_path }}/docker-compose.yml"
mode: '0644'
- name: Copy .env file
copy:
src: "{{ compose_src }}/.env"
dest: "{{ appdata_path }}/.env"
mode: '0600'
- name: Pull latest image
community.docker.docker_compose_v2:
project_src: "{{ appdata_path }}"
pull: always
state: present
- name: Start SilverBullet
community.docker.docker_compose_v2:
project_src: "{{ appdata_path }}"
state: present
- name: Wait for container to be healthy
shell: |
for i in {1..30}; do
status=$(docker inspect --format='{{ "{{" }}.State.Health.Status{{ "}}" }}' silverbullet 2>/dev/null || echo "not_found")
if [ "$status" = "healthy" ]; then
echo "Container is healthy"
exit 0
fi
sleep 2
done
echo "Timeout waiting for healthy status"
exit 1
register: health_check
changed_when: false
- name: Show container status
shell: docker ps --filter name=silverbullet --format "table {{ '{{' }}.Names{{ '}}' }}\t{{ '{{' }}.Status{{ '}}' }}\t{{ '{{' }}.Ports{{ '}}' }}"
register: container_status
changed_when: false
- name: Display status
debug:
var: container_status.stdout_lines
EOF
echo ""
echo "=========================================="
echo " SilverBullet Migration - Next Steps"
echo "=========================================="
echo ""
echo "1. RSYNC DATA FROM ALIEN"
echo " SSH to databases and pull the space directory:"
echo ""
echo " ssh databases"
echo " sudo mkdir -p /home/docker/appdata/silverbullet"
echo " sudo rsync -avP maddox@alien:/home/maddox/docker/appdata/silverbullet/space/ /home/docker/appdata/silverbullet/space/"
echo " exit"
echo ""
echo "2. DEPLOY VIA ANSIBLE"
echo " cd ~/clustered-fucks"
echo " ansible-playbook playbooks/deploy-silverbullet.yml"
echo ""
echo "3. VERIFY"
echo " curl -u maddox:./sk8nh8 http://192.168.1.81:53510/.ping"
echo " # Should return: OK"
echo ""
echo "4. UPDATE TRAEFIK ON ALIEN"
echo " Edit Traefik dynamic config to point silverbullet to:"
echo " http://192.168.1.81:53510"
echo ""
echo "5. TEST VIA DOMAIN"
echo " Access via your SilverBullet domain"
echo " Verify Authentik auth + SilverBullet loads"
echo ""
echo "6. CLEANUP ALIEN"
echo " ssh alien"
echo " cd /home/maddox/docker/appdata/silverbullet"
echo " docker compose down"
echo ""
echo "7. COMMIT TO GIT"
echo " cd ~/clustered-fucks"
echo " git add -A"
echo " git commit -m 'Add SilverBullet migration to databases'"
echo " git push"
echo ""
echo "=========================================="

@@ -0,0 +1,193 @@
#!/bin/bash
# Web-Check + CyberChef Migration Script
# Target: replicant (.80)
# Run this on the control server (CT 127)
set -e
COMPOSE_BASE=~/clustered-fucks/compose-files/replicant
PLAYBOOK_DIR=~/clustered-fucks/playbooks
echo "=== Web-Check + CyberChef Migration Script ==="
echo "Target: replicant (.80)"
echo "Both are STATELESS - no data to rsync!"
echo ""
# Create directories
mkdir -p "$COMPOSE_BASE/web-check"
mkdir -p "$COMPOSE_BASE/cyberchef"
mkdir -p "$PLAYBOOK_DIR"
# ============================================
# WEB-CHECK
# ============================================
cat > "$COMPOSE_BASE/web-check/docker-compose.yml" << 'EOF'
services:
web-check:
image: lissy93/web-check:latest
container_name: web-check
ports:
- "6160:3000"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 512M
cpus: '0.5'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_BASE/web-check/docker-compose.yml"
cat > "$PLAYBOOK_DIR/deploy-web-check.yml" << 'EOF'
---
- name: Deploy Web-Check to replicant
hosts: replicant
vars:
service_name: web-check
service_dir: "{{ docker_appdata }}/{{ service_name }}"
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Pull latest image
community.docker.docker_image:
name: lissy93/web-check:latest
source: pull
force_source: yes
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: missing
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-web-check.yml"
# ============================================
# CYBERCHEF
# ============================================
cat > "$COMPOSE_BASE/cyberchef/docker-compose.yml" << 'EOF'
services:
cyberchef:
image: mpepping/cyberchef:latest
container_name: cyberchef
ports:
- "7318:8000"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 256M
cpus: '0.5'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_BASE/cyberchef/docker-compose.yml"
cat > "$PLAYBOOK_DIR/deploy-cyberchef.yml" << 'EOF'
---
- name: Deploy CyberChef to replicant
hosts: replicant
vars:
service_name: cyberchef
service_dir: "{{ docker_appdata }}/{{ service_name }}"
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
mode: '0644'
- name: Pull latest image
community.docker.docker_image:
name: mpepping/cyberchef:latest
source: pull
force_source: yes
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: missing
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-cyberchef.yml"
echo ""
echo "============================================"
echo "=== Files Created ==="
echo "============================================"
echo " $COMPOSE_BASE/web-check/docker-compose.yml"
echo " $COMPOSE_BASE/cyberchef/docker-compose.yml"
echo " $PLAYBOOK_DIR/deploy-web-check.yml"
echo " $PLAYBOOK_DIR/deploy-cyberchef.yml"
echo ""
echo "============================================"
echo "=== Next Steps ==="
echo "============================================"
echo ""
echo "1. STOP OLD CONTAINERS ON ALIEN:"
echo " ssh alien 'docker stop web-check cyberchef'"
echo ""
echo "2. DEPLOY VIA ANSIBLE (no rsync needed - stateless!):"
echo " cd ~/clustered-fucks"
echo " ansible-playbook playbooks/deploy-web-check.yml"
echo " ansible-playbook playbooks/deploy-cyberchef.yml"
echo ""
echo "3. VERIFY:"
echo " curl -s -o /dev/null -w '%{http_code}' http://192.168.1.80:6160/"
echo " curl -s -o /dev/null -w '%{http_code}' http://192.168.1.80:7318/"
echo ""
echo "4. UPDATE TRAEFIK (on alien):"
echo " # Edit traefik config, change web-check URL to .80:6160"
echo " # Edit traefik config, change cyberchef URL to .80:7318"
echo ""
echo "5. CLEANUP ALIEN:"
echo " ssh alien 'docker rm web-check cyberchef'"
echo ""
echo "6. COMMIT TO GIT:"
echo " cd ~/clustered-fucks"
echo " git add -A && git commit -m 'Deploy web-check and cyberchef to replicant' && git push"
echo ""
echo "============================================"
echo "Done! Both services are stateless - no data migration needed."
echo "============================================"

@@ -0,0 +1,521 @@
#!/bin/bash
# ============================================================================
# Phase 2 Batch 2 Migration Script
# Target: replicant (.80)
# Services: tinymediamanager, autoscan, channeltube, dispatcharr
# Run this on the control server (CT 127)
# ============================================================================
set -e
COMPOSE_BASE=~/clustered-fucks/compose-files/replicant
PLAYBOOK_DIR=~/clustered-fucks/playbooks
echo "╔════════════════════════════════════════════════════════════════════════════╗"
echo "║ Phase 2 Batch 2: tinymediamanager, autoscan, channeltube, dispatcharr ║"
echo "║ Target: replicant (192.168.1.80) ║"
echo "╚════════════════════════════════════════════════════════════════════════════╝"
echo ""
# ============================================================================
# TINYMEDIAMANAGER
# ============================================================================
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Creating: tinymediamanager"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
mkdir -p "$COMPOSE_BASE/tinymediamanager"
cat > "$COMPOSE_BASE/tinymediamanager/docker-compose.yml" << 'EOF'
services:
tinymediamanager:
image: romancin/tinymediamanager:latest-v4
container_name: tinymediamanager
environment:
- USER_ID=1000
- GROUP_ID=1000
- TZ=America/Indiana/Indianapolis
- DISPLAY_WIDTH=1920
- DISPLAY_HEIGHT=1080
- KEEP_APP_RUNNING=1
- CLEAN_TMP_DIR=1
volumes:
- ./config:/config
- /volume1/Media:/media
ports:
- "45800:5800" # Web UI
- "45900:5900" # VNC
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 1G
cpus: '2.0'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_BASE/tinymediamanager/docker-compose.yml"
# Ansible playbook for tinymediamanager
cat > "$PLAYBOOK_DIR/deploy-tinymediamanager.yml" << 'EOF'
---
- name: Deploy TinyMediaManager to replicant
hosts: replicant
vars:
service_name: tinymediamanager
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Create config subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/config"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
owner: maddox
group: maddox
mode: '0644'
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
become: yes
become_user: maddox
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-tinymediamanager.yml"
# ============================================================================
# AUTOSCAN
# ============================================================================
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Creating: autoscan"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
mkdir -p "$COMPOSE_BASE/autoscan"
cat > "$COMPOSE_BASE/autoscan/docker-compose.yml" << 'EOF'
services:
autoscan:
image: cloudb0x/autoscan:latest
container_name: autoscan
environment:
- PUID=1000
- PGID=1000
- TZ=America/Indiana/Indianapolis
volumes:
- ./config:/config
- /volume1/Media:/media:ro
- /volume1/Downloads:/downloads:ro
ports:
- "3030:3030"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 512M
cpus: '1.0'
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:3030/health"]
interval: 30s
timeout: 10s
start_period: 40s
retries: 3
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_BASE/autoscan/docker-compose.yml"
# Ansible playbook for autoscan
cat > "$PLAYBOOK_DIR/deploy-autoscan.yml" << 'EOF'
---
- name: Deploy Autoscan to replicant
hosts: replicant
vars:
service_name: autoscan
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Create config subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/config"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
owner: maddox
group: maddox
mode: '0644'
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
become: yes
become_user: maddox
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-autoscan.yml"
# ============================================================================
# CHANNELTUBE
# ============================================================================
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Creating: channeltube"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
mkdir -p "$COMPOSE_BASE/channeltube"
cat > "$COMPOSE_BASE/channeltube/docker-compose.yml" << 'EOF'
services:
channeltube:
image: thewicklowwolf/channeltube:latest
container_name: channeltube
environment:
- PUID=1000
- PGID=1000
- TZ=America/Indiana/Indianapolis
volumes:
- ./config:/channeltube/config
- /volume1/Media/Youtube/movies:/channeltube/downloads
- /volume1/Media/Youtube/audio:/channeltube/audio_downloads
ports:
- "5444:5000"
restart: unless-stopped
networks:
- proxy
deploy:
resources:
limits:
memory: 1G
cpus: '2.0'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_BASE/channeltube/docker-compose.yml"
# Ansible playbook for channeltube
cat > "$PLAYBOOK_DIR/deploy-channeltube.yml" << 'EOF'
---
- name: Deploy ChannelTube to replicant
hosts: replicant
vars:
service_name: channeltube
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Create config subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/config"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
owner: maddox
group: maddox
mode: '0644'
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
become: yes
become_user: maddox
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-channeltube.yml"
# ============================================================================
# DISPATCHARR (with proxy to gluetun VPN)
# ============================================================================
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Creating: dispatcharr (using gluetun proxy for VPN)"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
mkdir -p "$COMPOSE_BASE/dispatcharr"
cat > "$COMPOSE_BASE/dispatcharr/docker-compose.yml" << 'EOF'
services:
dispatcharr:
image: ghcr.io/dispatcharr/dispatcharr:latest
container_name: dispatcharr
environment:
- PUID=1000
- PGID=1000
- TZ=America/Indiana/Indianapolis
- PORT=9191
# Route traffic through gluetun's HTTP proxy on download-stack
- HTTP_PROXY=http://192.168.1.122:38888
- HTTPS_PROXY=http://192.168.1.122:38888
- http_proxy=http://192.168.1.122:38888
- https_proxy=http://192.168.1.122:38888
# GPU settings (for future pve-alien, or CPU fallback)
- NVIDIA_VISIBLE_DEVICES=all
- NVIDIA_DRIVER_CAPABILITIES=compute,video,utility
volumes:
- ./data:/data
ports:
- "9191:9191"
restart: unless-stopped
networks:
- proxy
# Intel QuickSync access (pve-z620 has limited decode capability)
devices:
- /dev/dri:/dev/dri
group_add:
- video
- render
deploy:
resources:
limits:
memory: 2G
cpus: '2.0'
labels:
- "autoheal=true"
- "com.centurylinklabs.watchtower.enable=true"
networks:
proxy:
external: true
EOF
echo "✅ Created $COMPOSE_BASE/dispatcharr/docker-compose.yml"
# Ansible playbook for dispatcharr
cat > "$PLAYBOOK_DIR/deploy-dispatcharr.yml" << 'EOF'
---
- name: Deploy Dispatcharr to replicant
hosts: replicant
vars:
service_name: dispatcharr
service_dir: /home/maddox/docker/appdata/{{ service_name }}
compose_src: "{{ playbook_dir }}/../compose-files/replicant/{{ service_name }}"
tasks:
- name: Create service directory
ansible.builtin.file:
path: "{{ service_dir }}"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Create data subdirectory
ansible.builtin.file:
path: "{{ service_dir }}/data"
state: directory
owner: maddox
group: maddox
mode: '0755'
- name: Copy docker-compose.yml
ansible.builtin.copy:
src: "{{ compose_src }}/docker-compose.yml"
dest: "{{ service_dir }}/docker-compose.yml"
owner: maddox
group: maddox
mode: '0644'
- name: Deploy container
community.docker.docker_compose_v2:
project_src: "{{ service_dir }}"
state: present
pull: always
become: yes
become_user: maddox
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-dispatcharr.yml"
# ============================================================================
# COMBINED PLAYBOOK
# ============================================================================
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Creating: Combined deployment playbook"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
cat > "$PLAYBOOK_DIR/deploy-phase2-batch2.yml" << 'EOF'
---
# Combined playbook for Phase 2 Batch 2 migrations
# Run with: ansible-playbook playbooks/deploy-phase2-batch2.yml
- name: Deploy TinyMediaManager
import_playbook: deploy-tinymediamanager.yml
- name: Deploy Autoscan
import_playbook: deploy-autoscan.yml
- name: Deploy ChannelTube
import_playbook: deploy-channeltube.yml
- name: Deploy Dispatcharr
import_playbook: deploy-dispatcharr.yml
EOF
echo "✅ Created $PLAYBOOK_DIR/deploy-phase2-batch2.yml"
# ============================================================================
# SUMMARY AND NEXT STEPS
# ============================================================================
echo ""
echo "╔════════════════════════════════════════════════════════════════════════╗"
echo "║ FILES CREATED ║"
echo "╚════════════════════════════════════════════════════════════════════════╝"
echo ""
echo "Compose files:"
echo " - $COMPOSE_BASE/tinymediamanager/docker-compose.yml"
echo " - $COMPOSE_BASE/autoscan/docker-compose.yml"
echo " - $COMPOSE_BASE/channeltube/docker-compose.yml"
echo " - $COMPOSE_BASE/dispatcharr/docker-compose.yml"
echo ""
echo "Playbooks:"
echo " - $PLAYBOOK_DIR/deploy-tinymediamanager.yml"
echo " - $PLAYBOOK_DIR/deploy-autoscan.yml"
echo " - $PLAYBOOK_DIR/deploy-channeltube.yml"
echo " - $PLAYBOOK_DIR/deploy-dispatcharr.yml"
echo " - $PLAYBOOK_DIR/deploy-phase2-batch2.yml (combined)"
echo ""
echo "╔════════════════════════════════════════════════════════════════════════╗"
echo "║ NEXT STEPS ║"
echo "╚════════════════════════════════════════════════════════════════════════╝"
echo ""
echo "1. STOP OLD CONTAINERS on alien:"
echo " ssh alien 'docker stop tinymediamanager autoscan channeltube dispatcharr'"
echo ""
echo "2. CREATE TARGET DIRECTORIES on replicant:"
echo " ssh replicant 'mkdir -p /home/maddox/docker/appdata/{tinymediamanager,autoscan,channeltube}/config'"
echo " ssh replicant 'mkdir -p /home/maddox/docker/appdata/dispatcharr/data'"
echo ""
echo "3. RSYNC DATA (run FROM replicant):"
echo ""
echo " ssh replicant"
echo ""
echo " # TinyMediaManager config:"
echo " rsync -avP maddox@alien:/home/maddox/docker/appdata/tinymediamanager/config/ /home/maddox/docker/appdata/tinymediamanager/config/"
echo ""
echo " # Autoscan config (note: different source path on alien):"
echo " rsync -avP maddox@alien:/volume1/docker/autoscan/ /home/maddox/docker/appdata/autoscan/config/"
echo ""
echo " # ChannelTube config:"
echo " rsync -avP maddox@alien:/home/maddox/docker/appdata/channeltube/config/ /home/maddox/docker/appdata/channeltube/config/"
echo ""
echo " # Dispatcharr data (note: source is in vpn subdirectory):"
echo " rsync -avP maddox@alien:/home/maddox/docker/appdata/vpn/dispatcharr/ /home/maddox/docker/appdata/dispatcharr/data/"
echo ""
echo "4. DEPLOY ALL (combined playbook):"
echo " ansible-playbook playbooks/deploy-phase2-batch2.yml"
echo ""
echo " OR deploy individually:"
echo " ansible-playbook playbooks/deploy-tinymediamanager.yml"
echo " ansible-playbook playbooks/deploy-autoscan.yml"
echo " ansible-playbook playbooks/deploy-channeltube.yml"
echo " ansible-playbook playbooks/deploy-dispatcharr.yml"
echo ""
echo "5. VERIFY SERVICES:"
echo " curl -s http://192.168.1.80:45800/ | head -5 # TinyMediaManager"
echo " curl -s http://192.168.1.80:3030/health # Autoscan"
echo " curl -s http://192.168.1.80:5444/ | head -5 # ChannelTube"
echo " curl -s http://192.168.1.80:9191/ | head -5 # Dispatcharr"
echo ""
echo "6. UPDATE TRAEFIK (if applicable):"
echo " - tinymediamanager: backend → 192.168.1.80:45800"
echo " - autoscan: backend → 192.168.1.80:3030"
echo " - channeltube: backend → 192.168.1.80:5444"
echo " - dispatcharr: backend → 192.168.1.80:9191"
echo ""
echo "7. CLEANUP ALIEN:"
echo " ssh alien 'docker rm tinymediamanager autoscan channeltube dispatcharr'"
echo ""
echo "8. GIT COMMIT:"
echo " git add -A && git commit -m 'Phase 2 Batch 2: tinymediamanager, autoscan, channeltube, dispatcharr → replicant' && git push"
echo ""
echo "╔════════════════════════════════════════════════════════════════════════╗"
echo "║ DISPATCHARR NOTES ║"
echo "╚════════════════════════════════════════════════════════════════════════╝"
echo ""
echo "Dispatcharr is configured to use gluetun's HTTP proxy for VPN protection:"
echo " - HTTP_PROXY=http://192.168.1.122:38888"
echo " - HTTPS_PROXY=http://192.168.1.122:38888"
echo ""
echo "This routes HTTP/HTTPS traffic through the VPN while allowing dispatcharr"
echo "to run on replicant with its own network stack and direct port access."
echo ""
echo "GPU Status:"
echo " - /dev/dri is passed through (Intel QuickSync on pve-z620)"
echo " - FirePro V3900 = decode-only (no hardware encoding)"
echo " - NVIDIA env vars set for future pve-alien migration"
echo " - Will use CPU transcoding until GTX 1070 is available"
echo ""

@@ -1,31 +1,52 @@
 #!/bin/bash
 # Sync dyno.yml from Hetzner host "im"
-# Source: im:/matrix/traefik/config/dyno.yml
-# Destination: ~/scripts/configs/dyno.yml
-set -e
+# Runs via cron every 5 minutes
+# Auto-commits and pushes if changes detected

-DEST_DIR=~/scripts/configs
-DEST_FILE="$DEST_DIR/dyno.yml"
+SCRIPTS_DIR=~/scripts
+DEST_FILE="$SCRIPTS_DIR/configs/dyno.yml"
+LOG_FILE="$SCRIPTS_DIR/logs/sync-dyno.log"

-mkdir -p "$DEST_DIR"
+# Ensure dirs exist
+mkdir -p "$SCRIPTS_DIR/configs"
+mkdir -p "$SCRIPTS_DIR/logs"

-echo "Syncing dyno.yml from im..."
-rsync -avP im:/matrix/traefik/config/dyno.yml "$DEST_FILE"
+# Logging function
+log() {
+    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE"
+}

-echo ""
-echo "✅ Synced to: $DEST_FILE"
-echo ""
+# Rotate log if > 1MB
+if [ -f "$LOG_FILE" ] && [ $(stat -c%s "$LOG_FILE" 2>/dev/null || echo 0) -gt 1048576 ]; then
+    mv "$LOG_FILE" "$LOG_FILE.old"
+fi

-# Show diff if in git repo
-if [ -d ~/scripts/.git ]; then
-    cd ~/scripts
+# Sync file
+if ! rsync -az im:/matrix/traefik/config/dyno.yml "$DEST_FILE" 2>> "$LOG_FILE"; then
+    log "ERROR: rsync failed"
+    exit 1
+fi
+
+# Check for changes and auto-commit/push
+cd "$SCRIPTS_DIR"
+if [ -d .git ]; then
     if ! git diff --quiet configs/dyno.yml 2>/dev/null; then
-        echo "Changes detected:"
-        git diff configs/dyno.yml
-        echo ""
-        echo "To commit: cd ~/scripts && git add -A && git commit -m 'Update dyno.yml'"
-    else
-        echo "No changes from last sync."
+        log "Changes detected in dyno.yml"
+
+        # Auto-commit
+        git add configs/dyno.yml
+        git commit -m "Auto-sync dyno.yml $(date '+%Y-%m-%d %H:%M')" >> "$LOG_FILE" 2>&1
+        log "Committed changes"
+
+        # Auto-push (if remote configured)
+        if git remote | grep -q origin; then
+            if git push origin main >> "$LOG_FILE" 2>&1; then
+                log "Pushed to origin"
+            else
+                log "ERROR: Push failed"
+            fi
+        else
+            log "No remote configured, skipping push"
+        fi
     fi
 fi
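The rewritten script is meant to run unattended ("Runs via cron every 5 minutes" per its header). A sketch of a matching crontab entry, assuming the script lives at /home/maddox/scripts/sync-dyno.sh (the actual filename and path are not shown in this diff):

*/5 * * * * /home/maddox/scripts/sync-dyno.sh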