Use native Alpine package manager instead of custom one
commit 49b0296967 (parent 6b306390b3)
@@ -8,11 +8,5 @@
"adminpwd": "${ADMINPWD}",
"domain": "spotter.vm",
"port": "443"
},
"packages": {},
"repo": {
"pwd": "",
"url": "https://dl.dasm.cz/spotter-repo",
"user": ""
}
}
@@ -1,5 +0,0 @@
-----BEGIN PUBLIC KEY-----
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEWJXH4Qm0kt2L86sntQH+C1zOJNQ0qMRt
0vx4krTxRs9HQTQYAy//JC92ea2aKleA8OL0JF90b1NYXcQCWdAS+vE/ng9IEAii
8C2+5nfuFeZ5YUjbQhfFblwHSM0c7hEG
-----END PUBLIC KEY-----
@@ -31,6 +31,7 @@ parser_unregister_container.set_defaults(action='unregister-container')
parser_register_proxy = subparsers.add_parser('register-proxy')
parser_register_proxy.set_defaults(action='register-proxy')
parser_register_proxy.add_argument('app', help='Application name')
parser_register_proxy.add_argument('host', help='Application subdomain')

parser_unregister_proxy = subparsers.add_parser('unregister-proxy')
parser_unregister_proxy.set_defaults(action='unregister-proxy')

@@ -57,7 +58,7 @@ elif args.action == 'unregister-container':
lxcmgr.unregister_container()
elif args.action == 'register-proxy':
# Used in init scripts
lxcmgr.register_proxy(args.app)
lxcmgr.register_proxy(args.app, args.host)
elif args.action == 'unregister-proxy':
# Used in init scripts
lxcmgr.unregister_proxy(args.app)
@@ -3,7 +3,6 @@
from .appmgr import AppMgr
from .config import Config
from .lxcmgr import LXCMgr
from .pkgmgr import PkgMgr
from .vmmgr import VMMgr
from .wsgiapp import WSGIApp

@@ -11,7 +10,6 @@ __all__ = [
'AppMgr',
'Config',
'LXCMgr',
'PkgMgr',
'VMMgr',
'WSGIApp'
]
@@ -1,16 +1,24 @@
# -*- coding: utf-8 -*-

import json
import os
import requests
import subprocess
import time

from .pkgmgr import InstallItem, PkgMgr
class InstallItem:
def __init__(self):
self.bytes_total = 1
self.bytes_downloaded = 0

LXC_ROOT = '/var/lib/lxc'
@property
def percent_downloaded(self):
return round(self.bytes_downloaded / self.bytes_total * 100)

class AppMgr:
def __init__(self, conf):
self.conf = conf
self.pkgmgr = PkgMgr(conf)
self.online_packages = {}

def start_app(self, item):
# Start the actual app service

@@ -55,19 +63,43 @@ class AppMgr:
return os.path.exists(os.path.join('/etc/runlevels/default', app))

def install_app(self, item):
# Main installation function. Wrapper for download, registration and install script
# Main installation function. Wrapper for installation via native package manager
item.data = InstallItem()
self.pkgmgr.install_app(item.key, item.data)
# Alpine apk provides machine-readable progress in bytes_downloaded/bytes_total format output to file descriptor of choice
pipe_rfd, pipe_wfd = os.pipe()
with subprocess.Popen(['apk', '--progress-fd', str(pipe_wfd), '--no-cache', 'add', 'vm-{}@vm'.format(item.key)], pass_fds=[pipe_wfd]) as p:
while p.poll() is None:
time.sleep(0.1)
data = b''
while True:
chunk = os.read(pipe_rfd, 8192)
data += chunk
if len(chunk) < 8192:
break
progress = data.decode().splitlines()[-1].split('/')
item.data.bytes_downloaded = int(progress[0])
item.data.bytes_total = int(progress[1])
os.close(pipe_rfd)
os.close(pipe_wfd)

def uninstall_app(self, item):
# Main uninstallation function. Wrapper for uninstall script, filesystem purge and unregistration
# Main uninstallation function. Wrapper for uninstallation via native package manager
app = item.key
self.stop_app(item)
if self.is_service_autostarted(app):
self.update_app_autostart(app, False)
if app in self.conf['apps']:
del self.conf['apps'][app]
self.pkgmgr.uninstall_app(app)
subprocess.run(['apk', '--no-cache', 'del', 'vm-{}@vm'.format(app)])
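As an aside, the --progress-fd handling above can be exercised on its own. Below is a minimal standalone sketch of the same technique, assuming apk-tools with --progress-fd support; the package name and the percentage printing are illustrative, not part of the commit.

# Read apk's machine-readable progress ("downloaded/total" bytes) from a pipe.
# 'some-package' is a placeholder; integrate the percentage however you need.
import os
import subprocess
import time

def apk_add_with_progress(package):
    rfd, wfd = os.pipe()
    os.set_blocking(rfd, False)  # don't block when apk hasn't written progress yet
    with subprocess.Popen(['apk', '--progress-fd', str(wfd), '--no-cache', 'add', package],
                          pass_fds=[wfd]) as p:
        os.close(wfd)  # keep only the read end in the parent
        buf = b''
        while True:
            try:
                buf += os.read(rfd, 8192)
            except BlockingIOError:
                pass  # no new progress data at the moment
            lines = buf.decode(errors='replace').splitlines()
            if lines:
                parts = lines[-1].split('/', 1)
                if len(parts) == 2 and parts[0].isdigit() and parts[1].isdigit():
                    done, total = int(parts[0]), int(parts[1])
                    print('{}%'.format(round(done / max(total, 1) * 100)))
            if p.poll() is not None:
                break
            time.sleep(0.1)
    os.close(rfd)
    return p.returncode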
def fetch_online_packages(self):
# Fetches list of online packages
repo = vmmgr.get_repo_settings()
packages = requests.get('{}/packages'.format(repo['url']), auth=(repo['user'], repo['pwd']), timeout=5)
if packages.status_code != 200:
return packages.status_code
self.online_packages = json.loads(packages.content)
return 200

def get_services_deps(self):
# First, build a dictionary of {app: [needs]}

@@ -96,13 +128,6 @@ class AppMgr:
self.conf['common']['gmaps-api-key'] = gmaps_api_key
self.conf.save()

def update_repo_settings(self, url, user, pwd):
# Update lxc repository configuration
self.conf['repo']['url'] = url
self.conf['repo']['user'] = user
self.conf['repo']['pwd'] = pwd
self.conf.save()

def shutdown_vm(self):
subprocess.run(['/sbin/poweroff'])
@@ -3,8 +3,7 @@
import fcntl
import json

CONF_FILE = '/etc/vmmgr/config.json'
LOCK_FILE = '/var/lock/vmmgr-config.lock'
from .paths import CONF_FILE, CONF_LOCK

class Config:
def __init__(self):

@@ -12,14 +11,14 @@ class Config:

def load(self):
# Load configuration from file. Uses file lock as interprocess mutex
with open(LOCK_FILE, 'w') as lock:
with open(CONF_LOCK, 'w') as lock:
fcntl.lockf(lock, fcntl.LOCK_EX)
with open(CONF_FILE, 'r') as f:
self.data = json.load(f)

def save(self):
# Save configuration to a file. Uses file lock as interprocess mutex
with open(LOCK_FILE, 'w') as lock:
with open(CONF_LOCK, 'w') as lock:
fcntl.lockf(lock, fcntl.LOCK_EX)
with open(CONF_FILE, 'w') as f:
json.dump(self.data, f, sort_keys=True, indent=4)
@@ -10,10 +10,9 @@ from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID, ExtendedKeyUsageOID

CERT_PUB_FILE = '/etc/ssl/services.pem'
CERT_KEY_FILE = '/etc/ssl/services.key'
SIG_PUB_FILE = '/etc/vmmgr/packages.pub'
ACME_CRON = '/etc/periodic/daily/acme-sh'
from .paths import CERT_PUB_FILE, CERT_KEY_FILE, ACME_CRON

# TODO: Use old method without cryptography module?

def create_cert(domain):
# Create selfsigned certificate with wildcard alternative subject name

@@ -58,11 +57,6 @@ def get_cert_info():
data['method'] = 'selfsigned'
return data

def verify_signature(file, signature):
with open(SIG_PUB_FILE, 'rb') as f:
pub_key = serialization.load_pem_public_key(f.read(), default_backend())
pub_key.verify(signature, file, ec.ECDSA(hashes.SHA512()))

def adminpwd_hash(password):
return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()
@@ -6,8 +6,7 @@ import shutil
import subprocess

from . import templates

NGINX_DIR = '/etc/nginx/conf.d'
from .paths import HOSTS_FILE, HOSTS_LOCK, LXC_ROOT, NGINX_DIR

class LXCMgr:
def __init__(self, conf):

@@ -25,7 +24,7 @@ class LXCMgr:
def clean_ephemeral_layer(self, app):
# Cleans containers ephemeral layer.
# This is done early in the container start process, so the inode of the delta0 directory must remain unchanged
layer = os.path.join('/var/lib/lxc', app, 'delta0')
layer = os.path.join(LXC_ROOT, app, 'delta0')
if os.path.exists(layer):
for item in os.scandir(layer):
shutil.rmtree(item.path) if item.is_dir() else os.unlink(item.path)

@@ -53,10 +52,10 @@ class LXCMgr:
# Leases the first unused IP from range 172.17.0.0/16
# Uses file lock as interprocess mutex
ip = None
with open('/var/lock/vmmgr-hosts.lock', 'w') as lock:
with open(HOSTS_LOCK, 'w') as lock:
fcntl.lockf(lock, fcntl.LOCK_EX)
# Load all existing records
with open('/etc/hosts', 'r') as f:
with open(HOSTS_FILE, 'r') as f:
leases = [l.strip().split(' ', 1) for l in f]
# If this call is a request for lease, find the first unassigned IP
if is_request:

@@ -70,7 +69,7 @@ class LXCMgr:
else:
leases = [l for l in leases if l[1] != app]
# Write the contents back to the file
with open('/etc/hosts', 'w') as f:
with open(HOSTS_FILE, 'w') as f:
for lease in leases:
f.write('{} {}\n'.format(lease[0], lease[1]))
return ip
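The IP-leasing logic above spans two hunks and the picking step itself falls outside the diff context. A rough self-contained sketch of the pattern (file lock as interprocess mutex, /etc/hosts as the lease database) follows; the 172.17.0.0/16 picking rule is an assumption based on the comment above, not code taken from the commit.

# Lock, read existing leases, grant or release one, write the file back.
import fcntl

HOSTS_FILE = '/etc/hosts'
HOSTS_LOCK = '/var/lock/vmmgr-hosts.lock'

def lease_ip(app, is_request=True):
    ip = None
    with open(HOSTS_LOCK, 'w') as lock:
        fcntl.lockf(lock, fcntl.LOCK_EX)
        with open(HOSTS_FILE, 'r') as f:
            leases = [l.strip().split(' ', 1) for l in f if ' ' in l]
        if is_request:
            # Assumed picking rule: first unused address from 172.17.0.0/16
            used = {l[0] for l in leases}
            for third in range(256):
                for fourth in range(2, 255):
                    candidate = '172.17.{}.{}'.format(third, fourth)
                    if candidate not in used:
                        ip = candidate
                        break
                if ip:
                    break
            leases.append([ip, app])
        else:
            leases = [l for l in leases if l[1] != app]
        with open(HOSTS_FILE, 'w') as f:
            for lease in leases:
                f.write('{} {}\n'.format(lease[0], lease[1]))
    return ip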
@@ -86,10 +85,10 @@ class LXCMgr:
setup_env['GMAPS_API_KEY'] = self.conf['common']['gmaps-api-key']
subprocess.run([script], env=setup_env, check=True)

def register_proxy(self, app):
def register_proxy(self, app, host):
# Setup proxy configuration and reload nginx
with open(os.path.join(NGINX_DIR, '{}.conf'.format(app)), 'w') as f:
f.write(templates.NGINX.format(app=app, host=self.conf['packages'][app]['host'], domain=self.conf['host']['domain'], port=self.conf['host']['port']))
f.write(templates.NGINX.format(app=app, host=host, domain=self.conf['host']['domain'], port=self.conf['host']['port']))
self.reload_nginx()

def unregister_proxy(self, app):
@@ -6,6 +6,8 @@ import requests
import socket
import subprocess

from .paths import MYIP_URL, PING_URL

def compile_url(domain, port, proto='https'):
port = '' if (proto == 'https' and port == '443') or (proto == 'http' and port == '80') else ':{}'.format(port)
return '{}://{}{}'.format(proto, domain, port)

@@ -28,7 +30,7 @@ def get_external_ip(version):
allowed_gai_family = requests.packages.urllib3.util.connection.allowed_gai_family
try:
requests.packages.urllib3.util.connection.allowed_gai_family = lambda: family
return requests.get('https://tools.dasm.cz/myip.php', timeout=5).text
return requests.get(MYIP_URL, timeout=5).text
except:
return None
finally:

@@ -50,7 +52,7 @@ def resolve_ip(domain, qtype):

def ping_url(url):
try:
return requests.get('https://tools.dasm.cz/vm-ping.php', params={'url': url}, timeout=5).text == 'vm-pong'
return requests.get(PING_URL, params={'url': url}, timeout=5).text == 'vm-pong'
except requests.exceptions.Timeout:
raise
except:
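Aside: the get_external_ip hunk relies on temporarily overriding urllib3's allowed_gai_family to force resolution over IPv4 or IPv6. Here is a hedged standalone sketch of that technique; the URL is a placeholder, not the project's endpoint.

# Force requests to resolve a host over a specific address family, then restore the hook.
import socket
import requests

def fetch_over_family(url, version=4):
    family = socket.AF_INET if version == 4 else socket.AF_INET6
    conn = requests.packages.urllib3.util.connection
    orig = conn.allowed_gai_family
    try:
        conn.allowed_gai_family = lambda: family
        return requests.get(url, timeout=5).text
    except requests.exceptions.RequestException:
        return None
    finally:
        conn.allowed_gai_family = orig

# e.g. fetch_over_family('https://example.org/myip', 6)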
usr/lib/python3.6/vmmgr/paths.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-

# Config
CONF_FILE = '/etc/vmmgr/config.json'
CONF_LOCK = '/var/lock/vmmgr-config.lock'

# Crypto
ACME_CRON = '/etc/periodic/daily/acme-sh'
ACME_DIR = '/etc/acme.sh.d'
CERT_KEY_FILE = '/etc/ssl/services.key'
CERT_PUB_FILE = '/etc/ssl/services.pem'

# LXC
HOSTS_FILE = '/etc/hosts'
HOSTS_LOCK = '/var/lock/vmmgr-hosts.lock'
LXC_ROOT = '/var/lib/lxc'

# OS
ISSUE_FILE = '/etc/issue'
NGINX_DIR = '/etc/nginx/conf.d'
REPO_FILE = '/etc/apk/repositories'

# URLs
MYIP_URL = 'https://tools.dasm.cz/myip.php'
PING_URL = 'https://tools.dasm.cz/vm-ping.php'
@@ -1,180 +0,0 @@
# -*- coding: utf-8 -*-

import hashlib
import json
import os
import requests
import shutil
import subprocess

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec

from . import crypto

LXC_ROOT = '/var/lib/lxc'

STAGE_DOWNLOAD = 0
STAGE_INSTALL_DEPS = 1
STAGE_INSTALL_APP = 2

class InstallItem:
def __init__(self):
self.stage = STAGE_DOWNLOAD
self.bytes_total = 1
self.bytes_downloaded = 0

@property
def percent_downloaded(self):
# Limit the displayed percentage to 0 - 99
return min(99, round(self.bytes_downloaded / self.bytes_total * 100))

class PkgMgr:
def __init__(self, conf):
self.repo_url = repo_url
self.conf = conf
self.online_packages = {}

def get_repo_resource(self, resource_url, stream=False):
return requests.get('{}/{}'.format(self.repo_url, resource_url), auth=self.repo_auth, timeout=5, stream=stream)

def fetch_online_packages(self):
# Fetches and verifies online packages. Can raise InvalidSignature
packages = self.get_repo_resource('packages')
if packages.status_code != 200:
return packages.status_code
packages = packages.content
packages_sig = self.get_repo_resource('packages.sig').content
crypto.verify_signature(packages, packages_sig)
self.online_packages = json.loads(packages)
return 200

def install_app(self, app, item):
# Main installation function. Wrapper for download, registration and install script
self.fetch_online_packages()
# Clean packages which previously failed to install
self.clean_pending_packages()
# Get all packages on which the app depends and which have not been installed yet
deps = [d for d in self.get_install_deps(app) if d not in self.conf['packages'] or 'pending' in self.conf['packages'][d]]
item.bytes_total = sum(self.online_packages[d]['size'] for d in deps)
for dep in deps:
self.download_package(dep, item)
for dep in deps:
# Set stage to INSTALLING_DEPS or INSTALLING based on which package in sequence is being installed
item.stage = STAGE_INSTALL_APP if dep == deps[-1] else STAGE_INSTALL_DEPS
# Purge old data before unpacking to clean previous failed installation
self.purge_package(dep)
self.unpack_package(dep)
# Run uninstall script before installation to clean previous failed installation
self.run_uninstall_script(dep)
self.register_package(dep)
self.run_install_script(dep)

def uninstall_app(self, app):
# Main uninstallation function. Wrapper for uninstall script, filesystem purge and unregistration
deps = self.get_install_deps(app, False)[::-1]
for dep in deps:
if dep not in self.get_uninstall_deps():
self.run_uninstall_script(dep)
self.purge_package(dep)
self.unregister_package(dep)

def download_package(self, name, item):
tmp_archive = '/tmp/{}.tar.xz'.format(name)
r = self.get_repo_resource('{}.tar.xz'.format(name), True)
with open(tmp_archive, 'wb') as f:
for chunk in r.iter_content(chunk_size=65536):
if chunk:
item.bytes_downloaded += f.write(chunk)
# Verify hash
if self.online_packages[name]['sha512'] != self.hash_file(tmp_archive):
raise InvalidSignature(name)

def hash_file(self, file_path):
sha512 = hashlib.sha512()
with open(file_path, 'rb') as f:
while True:
data = f.read(65536)
if not data:
break
sha512.update(data)
return sha512.hexdigest()

def unpack_package(self, name):
# Unpack archive
tmp_archive = '/tmp/{}.tar.xz'.format(name)
subprocess.run(['tar', 'xJf', tmp_archive], cwd='/', check=True)
os.unlink(tmp_archive)

def purge_package(self, name):
# Removes package and shared data from filesystem
lxcpath = self.conf['packages'][name]['lxcpath'] if name in self.conf['packages'] else self.online_packages[name]['lxcpath']
lxc_dir = os.path.join(LXC_ROOT, lxcpath)
if os.path.exists(lxc_dir):
shutil.rmtree(lxc_dir)
srv_dir = os.path.join('/srv/', name)
if os.path.exists(srv_dir):
shutil.rmtree(srv_dir)
lxc_log = '/var/log/lxc/{}.log'.format(name)
if os.path.exists(lxc_log):
os.unlink(lxc_log)

def register_package(self, name):
# Registers a package in installed packages
metadata = self.online_packages[name].copy()
del metadata['sha512']
del metadata['size']
metadata['pending'] = True
self.conf['packages'][name] = metadata
self.conf.save()

def unregister_package(self, name):
# Removes a package from installed packages
del self.conf['packages'][name]
self.conf.save()

def clean_pending_packages(self):
# Remove registered packages with pending flag set from previously failed installation
for name in self.conf['packages'].copy():
if 'pending' in self.conf['packages'][name]:
self.unregister_package(name)
self.conf.save()

def run_install_script(self, name):
# Runs install.sh for a package, if the script is present
install_dir = os.path.join('/srv/', name, 'install')
install_script = os.path.join('/srv/', name, 'install.sh')
if os.path.exists(install_script):
subprocess.run(install_script, check=True)
os.unlink(install_script)
if os.path.exists(install_dir):
shutil.rmtree(install_dir)
# Reload config to reflect whatever vmmgr register-app from the install script has written in it
self.conf.load()
del self.conf['packages'][name]['pending']
self.conf.save()

def run_uninstall_script(self, name):
# Runs uninstall.sh for a package, if the script is present
uninstall_script = os.path.join('/srv/', name, 'uninstall.sh')
if os.path.exists(uninstall_script):
subprocess.run(uninstall_script, check=True)

def get_install_deps(self, name, online=True):
# Flatten dependency tree for a package while preserving the dependency order
packages = self.online_packages if online else self.conf['packages']
deps = packages[name]['deps'].copy()
for dep in deps[::-1]:
deps[:0] = [d for d in self.get_install_deps(dep, online)]
deps = list(dict.fromkeys(deps + [name]))
return deps

def get_uninstall_deps(self):
# Create reverse dependency tree for all installed packages
deps = {}
for name in self.conf['packages'].copy():
for d in self.conf['packages'][name]['deps']:
deps.setdefault(d, []).append(name)
return deps
@@ -112,3 +112,9 @@ ISSUE = '''
- \x1b[1m{url}\x1b[0m
- \x1b[1m{ip}\x1b[0m\x1b[?1c
'''

REPOSITORIES = '''
http://dl-cdn.alpinelinux.org/alpine/v3.9/main
http://dl-cdn.alpinelinux.org/alpine/v3.9/community
@vm {url}
'''
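For illustration only, rendering the new REPOSITORIES template with a made-up repository URL (credentials embedded in the URL, as set_repo_settings later in this diff constructs it) would yield an /etc/apk/repositories along these lines:

http://dl-cdn.alpinelinux.org/alpine/v3.9/main
http://dl-cdn.alpinelinux.org/alpine/v3.9/community
@vm https://user:secret@repo.example.org/spotter-repo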
@@ -21,10 +21,11 @@ def is_valid_email(email):
parts = email.split('@')
return len(parts) == 2 and bool(box_re.match(parts[0])) and bool(domain_re.match(parts[1]))

def is_valid_url(url):
def is_valid_repo_url(url):
# Check if URL is valid http(s) and doesn't contain extra parts
try:
parsed = urlparse(url)
return parsed.scheme in ('http', 'https')
return parsed.scheme in ('http', 'https') and not parsed.params and not parsed.query and not parsed.fragment
except:
pass
return False
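A quick illustration of what the stricter check accepts and rejects, assuming is_valid_repo_url as defined above; the URLs are made up:

is_valid_repo_url('https://repo.example.org/spotter-repo')         # True
is_valid_repo_url('https://repo.example.org/repo?branch=testing')  # False (query string)
is_valid_repo_url('https://repo.example.org/repo#latest')          # False (fragment)
is_valid_repo_url('ftp://repo.example.org/repo')                   # False (scheme)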
@@ -3,14 +3,12 @@
import os
import shutil
import subprocess
import urllib

from . import crypto
from . import templates
from . import net

ISSUE_FILE = '/etc/issue'
NGINX_DIR = '/etc/nginx/conf.d'
ACME_CRON = '/etc/periodic/daily/acme-sh'
from .paths import ACME_CRON, ACME_DIR, ISSUE_FILE, NGINX_DIR, REPO_FILE

class VMMgr:
def __init__(self, conf):

@@ -59,6 +57,30 @@ class VMMgr:
# Save config to file
self.conf.save()

def get_repo_settings(self):
# Read, parse and return current @vm repository configuration
with open(REPO_FILE) as f:
url = [l for l in f.read().splitlines() if l.startswith('@vm')][0].split(' ', 2)[1]
url = urllib.parse.urlparse(url)
return {'url': '{}://{}{}'.format(url.scheme, url.netloc, url.path),
'user': url.username,
'pwd': url.password}

def set_repo_settings(self, url, user, pwd):
# Update @vm repository configuration
url = urllib.parse.urlparse(url)
# Create URL with username and password
repo_url = [url.scheme, '://']
if user:
repo_url.append(urllib.parse.quote(user, safe=''))
if pwd:
repo_url.extend((':', urllib.parse.quote(pwd, safe='')))
repo_url.append('@')
repo_url.extend((url.netloc, url.path))
# Update URL in repositories file
with open(REPO_FILE, 'w') as f:
f.write(templates.REPOSITORIES.format(url=''.join(repo_url)))
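A hedged round-trip illustration of the two methods above, assuming the @vm line format from templates.REPOSITORIES; the URL and credentials are invented, and hostname (rather than netloc, as in the hunk above) is used when reading the settings back so the reported URL carries no credentials:

# Build an '@vm' repository line with embedded credentials, then parse it back.
import urllib.parse

url, user, pwd = 'https://repo.example.org/spotter-repo', 'alice', 's3cret'

parts = urllib.parse.urlparse(url)
line = '@vm {}://{}:{}@{}{}'.format(parts.scheme,
                                    urllib.parse.quote(user, safe=''),
                                    urllib.parse.quote(pwd, safe=''),
                                    parts.netloc, parts.path)
# line == '@vm https://alice:s3cret@repo.example.org/spotter-repo'

parsed = urllib.parse.urlparse(line.split(' ', 2)[1])
settings = {'url': '{}://{}{}'.format(parsed.scheme, parsed.hostname, parsed.path),
            'user': parsed.username,
            'pwd': parsed.password}
# settings == {'url': 'https://repo.example.org/spotter-repo', 'user': 'alice', 'pwd': 's3cret'}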
def create_selfsigned_cert(self):
# Disable acme.sh cronjob
os.chmod(ACME_CRON, 0o640)

@@ -69,16 +91,16 @@ class VMMgr:

def request_acme_cert(self):
# Remove all possible conflicting certificates requested in the past
certs = [i for i in os.listdir('/etc/acme.sh.d') if i not in ('account.conf', 'ca', 'http.header')]
certs = [i for i in os.listdir(ACME_DIR) if i not in ('account.conf', 'ca', 'http.header')]
for cert in certs:
if cert != self.domain:
subprocess.run(['/usr/bin/acme.sh', '--remove', '-d', cert])
# Compile an acme.sh command for certificate requisition only if the certificate hasn't been requested before
if not os.path.exists(os.path.join('/etc/acme.sh.d', self.domain)):
if not os.path.exists(os.path.join(ACME_DIR, self.domain)):
cmd = ['/usr/bin/acme.sh', '--issue', '-d', self.domain]
for app in self.conf['apps'].copy():
cmd += ['-d', '{}.{}'.format(self.conf['packages'][app]['host'], self.domain)]
cmd += ['-w', '/etc/acme.sh.d']
cmd += ['-d', '{}.{}'.format(self.conf['apps'][app]['host'], self.domain)]
cmd += ['-w', ACME_DIR]
# Request the certificate
subprocess.run(cmd, check=True)
# Otherwise just try to renew
@@ -29,8 +29,6 @@ class WSGIApp:
self.vmmgr = VMMgr(self.conf)
self.appmgr = AppMgr(self.conf)
self.queue = ActionQueue()
# Clean broken and interrupted installations in case of unclean previous shutdown
self.appmgr.clean_pending_packages()
self.jinja_env = Environment(loader=FileSystemLoader('/usr/share/vmmgr/templates'), autoescape=True, lstrip_blocks=True, trim_blocks=True)
self.jinja_env.globals.update(is_app_visible=self.is_app_visible)
self.url_map = Map((

@@ -176,7 +174,7 @@ class WSGIApp:
app_data = {}
for app in actionable_apps:
installed = app in self.conf['apps']
title = self.conf['packages'][app]['title'] if installed else self.appmgr.online_packages[app]['title']
title = self.conf['apps'][app]['title'] if installed else self.appmgr.online_packages[app]['title']
visible = self.conf['apps'][app]['visible'] if installed else False
autostarted = self.appmgr.is_service_autostarted(app) if installed else False
if app in pending_actions:

@@ -248,7 +246,7 @@ class WSGIApp:

def verify_dns_action(self, request):
# Check if all FQDNs for all applications are resolvable and point to current external IP
domains = [self.vmmgr.domain]+['{}.{}'.format(self.conf['packages'][app]['host'], self.vmmgr.domain) for app in self.conf['apps']]
domains = [self.vmmgr.domain]+['{}.{}'.format(self.conf['apps'][app]['host'], self.vmmgr.domain) for app in self.conf['apps']]
ipv4 = net.get_external_ip(4)
ipv6 = net.get_external_ip(6)
for domain in domains:

@@ -269,7 +267,7 @@ class WSGIApp:
# Check if all applications are accessible from the internet using 3rd party ping service
proto = kwargs['proto']
port = self.vmmgr.port if proto == 'https' else '80'
domains = [self.vmmgr.domain]+['{}.{}'.format(self.conf['packages'][app]['host'], self.vmmgr.domain) for app in self.conf['apps']]
domains = [self.vmmgr.domain]+['{}.{}'.format(self.conf['apps'][app]['host'], self.vmmgr.domain) for app in self.conf['apps']]
for domain in domains:
url = net.compile_url(domain, port, proto)
try:

@@ -313,10 +311,10 @@ class WSGIApp:
def update_repo_action(self, request):
# Update repository URL and credentials
url = request.form['repourl']
if not validator.is_valid_url(url):
if not validator.is_valid_repo_url(url):
request.session['msg'] = 'repo:error:{}'.format(request.session.lang.invalid_url(request.form['repourl']))
else:
self.appmgr.update_repo_settings(url, request.form['repousername'], request.form['repopassword'])
self.vmmgr.set_repo_settings(url, request.form['repousername'], request.form['repopassword'])
request.session['msg'] = 'repo:info:{}'.format(request.session.lang.repo_updated())
return redirect('/setup-apps')

@@ -39,7 +39,7 @@
<ul style="column-count:3">
<li>{{ conf['host']['domain'] }}</li>
{% for app in conf['apps']|sort %}
<li>{{ conf['packages'][app]['host'] }}.{{ conf['host']['domain'] }}</li>
<li>{{ conf['apps'][app]['host'] }}.{{ conf['host']['domain'] }}</li>
{% endfor %}
</ul>
<input type="button" id="verify-dns" value="Ověřit nastavení DNS">